1 /* Implements exception handling.
2 Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
26 be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurred without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46 There are two major codegen options for exception handling: a PC
47 range table approach and a setjmp/longjmp based scheme. The flag
48 -fsjlj-exceptions can be used to select the setjmp/longjmp
49 approach, which is the default; -fno-sjlj-exceptions can be used
50 to get the PC range table approach. While this is a compile-time
51 flag, an entire application must be compiled with the same codegen
52 option. We will first discuss the PC range table approach, and
53 after that we will discuss the setjmp/longjmp based
54 approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61 Regions of code within a function can be marked such that if it
62 contains the context of a throw, control will be passed to a
63 designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
74 In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
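
     Conceptually, each entry of __EXCEPTION_TABLE__ can be pictured as a
     record like the one below. This is only an illustrative sketch; the
     real layout is defined by the runtime support (eh-common.h and
     libgcc2.c), and the field names here are made up:

	struct eh_table_entry_sketch
	{
	  void *region_start;    ... first address covered by the region
	  void *region_end;      ... last address covered by the region
	  void *handler;         ... address of the handler for the region
	};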
89
90 If the target does not use the DWARF 2 frame unwind information, at
91 program startup each object file invokes a function named
92 __register_exceptions with the address of its local
93 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94 is responsible for recording all of the exception regions into one list
95 (which is kept in a static variable named exception_table_list).
96
97 On targets that support crtstuff.c, the unwind information
98 is stored in a section named .eh_frame and the information for the
99 entire shared object or program is registered with a call to
100 __register_frame_info. On other targets, the information for each
101 translation unit is registered from the file generated by collect2.
102 __register_frame_info is defined in frame.c, and is responsible for
103 recording all of the unwind regions into one list (which is kept in a
104 static variable named unwind_table_list).
105
106 The function __throw is actually responsible for doing the
107 throw. On machines that have unwind info support, __throw is generated
108 by code in libgcc2.c; otherwise __throw is generated on a
109 per-object-file basis for each source file compiled with
110 -fexceptions by the C++ frontend. Before __throw is invoked,
111 the current context of the throw needs to be placed in the global
112 variable __eh_pc.
113
114 __throw attempts to find the appropriate exception handler for the
115 PC value stored in __eh_pc by calling __find_first_exception_table_match
116 (which is defined in libgcc2.c). If __find_first_exception_table_match
117 finds a relevant handler, __throw transfers control directly to it.
118
119 If a handler for the context being thrown from can't be found, __throw
120 walks the stack (see "Walking the stack" below) up the dynamic call
121 chain to continue searching for an appropriate exception handler, based
122 upon the caller of the function it last sought an exception handler
123 for. It stops when either an exception handler is found, or when the
124 top of the call chain is reached.
125
126 If no handler is found, an external library function named
127 __terminate is called. If a handler is found, then we restart
128 our search for a handler at the end of the call chain, and repeat
129 the search process, but instead of just walking up the call chain,
130 we unwind the call chain as we walk up it.
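
     Put together, and leaving out many details, the search logic just
     described looks roughly like the following C-like sketch. This is
     not the actual code in libgcc2.c; the helpers top_of_call_chain_p
     and caller_return_address are purely illustrative:

	pc = __eh_pc;
	while ((handler = __find_first_exception_table_match (pc)) == 0)
	  {
	    if (top_of_call_chain_p (pc))
	      __terminate ();
	    pc = caller_return_address (pc);   ... walk, but do not unwind
	  }
	... restart from __eh_pc, this time unwinding each frame as it is
	    walked, and finally transfer control to HANDLER ...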
131
132 Internal implementation details:
133
134 To associate a user-defined handler with a block of statements, the
135 function expand_start_try_stmts is used to mark the start of the
136 block of statements with which the handler is to be associated
137 (which is known as a "try block"). All statements that appear
138 afterwards will be associated with the try block.
139
140 A call to expand_start_all_catch marks the end of the try block,
141 and also marks the start of the "catch block" (the user-defined
142 handler) associated with the try block.
143
144 This user-defined handler will be invoked for *every* exception
145 thrown with the context of the try block. It is up to the handler
146 to decide whether or not it wishes to handle any given exception,
147 as there is currently no mechanism in this implementation for the
148 compiler to make that decision. (There are plans for conditionally
149 processing an exception based on its "type", which will provide a
150 language-independent mechanism).
151
152 If the handler chooses not to process the exception (perhaps by
153 looking at an "exception type" or some other additional data
154 supplied with the exception), it can fall through to the end of the
155 handler. expand_end_all_catch and expand_leftover_cleanups
156 add additional code to the end of each handler to take care of
157 rethrowing to the outer exception handler.
158
159 The handler also has the option to continue with "normal flow of
160 code", or in other words to resume executing at the statement
161 immediately after the end of the exception region. The variable
162 caught_return_label_stack contains a stack of labels, and jumping
163 to the topmost entry's label via expand_goto will resume normal
164 flow to the statement immediately after the end of the exception
165 region. If the handler falls through to the end, the exception will
166 be rethrown to the outer exception region.
167
168 The instructions for the catch block are kept as a separate
169 sequence, and will be emitted at the end of the function along with
170 the handlers specified via expand_eh_region_end. The end of the
171 catch block is marked with expand_end_all_catch.
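
     Putting these together, a front end lowering a try/catch construct
     makes calls in roughly the following order. This is only a
     hypothetical sketch (not taken from any particular front end), and
     RUNTIME_TYPE_OBJECT stands for whatever typeinfo object the language
     passes to start_catch_handler under the new-exceptions model:

	expand_start_try_stmts ();
	... expand the statements of the try block ...
	expand_start_all_catch ();
	start_catch_handler (RUNTIME_TYPE_OBJECT);
	... expand the statements of the catch handler ...
	expand_end_all_catch ();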
172
173 Any data associated with the exception must currently be handled by
174 some external mechanism maintained in the frontend. For example,
175 the C++ exception mechanism passes an arbitrary value along with
176 the exception, and this is handled in the C++ frontend by using a
177 global variable to hold the value. (This will be changing in the
178 future.)
179
180 The mechanism in C++ for handling data associated with the
181 exception is clearly not thread-safe. For a thread-based
182 environment, another mechanism must be used (possibly using a
183 per-thread allocation mechanism if the size of the area that needs
184 to be allocated isn't known at compile time.)
185
186 Internally-generated exception regions (cleanups) are marked by
187 calling expand_eh_region_start to mark the start of the region,
188 and expand_eh_region_end (handler) is used to both designate the
189 end of the region and to associate a specified handler/cleanup with
190 the region. The rtl code in HANDLER will be invoked whenever an
191 exception occurs in the region between the calls to
192 expand_eh_region_start and expand_eh_region_end. After HANDLER is
193 executed, additional code is emitted to handle rethrowing the
194 exception to the outer exception handler. The code for HANDLER will
195 be emitted at the end of the function.
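
     As a usage sketch (hypothetical, not code from any front end), a
     cleanup region protecting a block of statements looks like this,
     where CLEANUP is the tree for the cleanup action:

	expand_eh_region_start ();
	... expand the statements to be protected ...
	expand_eh_region_end (CLEANUP);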
196
197 TARGET_EXPRs can also be used to designate exception regions. A
198 TARGET_EXPR gives an unwind-protect style interface commonly used
199 in functional languages such as LISP. The associated expression is
200 evaluated, and whether or not it (or any of the functions that it
201 calls) throws an exception, the protect expression is always
202 invoked. This implementation takes care of the details of
203 associating an exception table entry with the expression and
204 generating the necessary code (it actually emits the protect
205 expression twice, once for normal flow and once for the exception
206 case). As for the other handlers, the code for the exception case
207 will be emitted at the end of the function.
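
     In terms of the calls described above, the effect for a TARGET_EXPR
     with a cleanup is roughly equivalent to

	expand_eh_region_start ();
	... evaluate the TARGET_EXPR's initializer ...
	expand_eh_region_end (CLEANUP);
	... expand CLEANUP again for the normal-flow path ...

     (an approximation only; the actual expansion is performed by the
     TARGET_EXPR machinery itself).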
208
209 Cleanups can also be specified by using add_partial_entry (handler)
210 and end_protect_partials. add_partial_entry creates the start of
211 a new exception region; HANDLER will be invoked if an exception is
212 thrown with the context of the region between the calls to
213 add_partial_entry and end_protect_partials. end_protect_partials is
214 used to mark the end of these regions. add_partial_entry can be
215 called as many times as needed before calling end_protect_partials.
216 However, end_protect_partials should only be invoked once for each
217 group of calls to add_partial_entry as the entries are queued
218 and all of the outstanding entries are processed simultaneously
219 when end_protect_partials is invoked. Similarly to the other
220 handlers, the code for HANDLER will be emitted at the end of the
221 function.
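
     A hypothetical usage sketch (CLEANUP_1 and CLEANUP_2 stand for the
     cleanup trees of two objects that need protecting):

	add_partial_entry (CLEANUP_1);
	add_partial_entry (CLEANUP_2);
	... expand the statements protected by both cleanups ...
	end_protect_partials ();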
222
223 The generated RTL for an exception region includes
224 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225 the start and end of the exception region. A unique label is also
226 generated at the start of the exception region, which is available
227 by looking at the ehstack variable. The topmost entry corresponds
228 to the current region.
229
230 In the current implementation, an exception can only be thrown from
231 a function call (since the mechanism used to actually throw an
232 exception involves calling __throw). If an exception region is
233 created but no function calls occur within that region, the region
234 can be safely optimized away (along with its exception handlers)
235 since no exceptions can ever be caught in that region. This
236 optimization is performed unless -fasynchronous-exceptions is
237 given. If the user wishes to throw from a signal handler, or other
238 asynchronous place, -fasynchronous-exceptions should be used when
239 compiling for maximally correct code, at the cost of additional
240 exception regions. Using -fasynchronous-exceptions only produces
241 code that is reasonably safe in such situations, but a correct
242 program cannot rely upon this working. It can be used in failsafe
243 code, where trying to continue on and proceed with potentially
244 incorrect results is better than halting the program.
245
246
247 Walking the stack:
248
249 The stack is walked by starting with a pointer to the current
250 frame, and finding the pointer to the caller's frame. The unwind info
251 tells __throw how to find it.
252
253 Unwinding the stack:
254
255 When we use the term unwinding the stack, we mean undoing the
256 effects of the function prologue in a controlled fashion so that we
257 still have the flow of control. Otherwise, we could just return
258 (jump to the normal end of function epilogue).
259
260 This is done in __throw in libgcc2.c when we know that a handler exists
261 in a frame higher up the call stack than its immediate caller.
262
263 To unwind, we find the unwind data associated with the frame, if any.
264 If we don't find any, we call the library routine __terminate. If we do
265 find it, we use the information to copy the saved register values from
266 that frame into the register save area in the frame for __throw, return
267 into a stub which updates the stack pointer, and jump to the handler.
268 The normal function epilogue for __throw handles restoring the saved
269 values into registers.
270
271 When unwinding, we use this method if we know it will
272 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know
273 that an inline unwinder will have been emitted for any function
274 that __unwind_function cannot unwind; the inline unwinder
275 appears as a normal exception handler covering the entire
276 function. We inform the compiler of whether a function can be
277 unwound with __unwind_function by having DOESNT_NEED_UNWINDER
278 evaluate to true when the unwinder isn't needed.
279 __unwind_function is used as an action of last resort: if no
280 other method can be used for unwinding, __unwind_function is
281 used, and if it cannot unwind, it should call
282 __terminate.
283
284 By default, if the target-specific backend doesn't supply a definition
285 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286 unwinders will be used instead. The main tradeoff here is in text space
287 utilization. Obviously, if inline unwinders have to be generated
288 repeatedly, this uses much more space than if a single routine is used.
289
290 However, it is simply not possible on some platforms to write a
291 generalized routine for doing stack unwinding without having some
292 form of additional data associated with each function. The current
293 implementation can encode this data in the form of additional
294 machine instructions or as static data in tabular form. The latter
295 is called the unwind data.
296
297 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299 defined and has a non-zero value, a per-function unwinder is not emitted
300 for the current function. If the static unwind data is supported, then
301 a per-function unwinder is not emitted.
302
303 On some platforms it is possible that neither __unwind_function
304 nor inlined unwinders are available. For these platforms it is not
305 possible to throw through a function call, and abort will be
306 invoked instead of performing the throw.
307
308 The reason the unwind data may be needed is that on some platforms
309 the order and types of data stored on the stack can vary depending
310 on the type of function, its arguments and returned values, and the
311 compilation options used (optimization versus non-optimization,
312 -fomit-frame-pointer, processor variations, etc).
313
314 Unfortunately, this also means that throwing through functions that
315 aren't compiled with exception handling support will still not be
316 possible on some platforms. This problem is currently being
317 investigated, but no solutions have been found that do not imply
318 some unacceptable performance penalties.
319
320 Future directions:
321
322 Currently __throw makes no differentiation between cleanups and
323 user-defined exception regions. While this makes the implementation
324 simple, it also implies that it is impossible to determine if a
325 user-defined exception handler exists for a given exception without
326 completely unwinding the stack in the process. This is undesirable
327 from the standpoint of debugging, as ideally it would be possible
328 to trap unhandled exceptions in the debugger before the process of
329 unwinding has even started.
330
331 This problem can be solved by marking user-defined handlers in a
332 special way (probably by adding additional bits to exception_table_list).
333 A two-pass scheme could then be used by __throw to iterate
334 through the table. The first pass would search for a relevant
335 user-defined handler for the current context of the throw, and if
336 one is found, the second pass would then invoke all needed cleanups
337 before jumping to the user-defined handler.
338
339 Many languages (including C++ and Ada) make execution of a
340 user-defined handler conditional on the "type" of the exception
341 thrown. (The type of the exception is actually the type of the data
342 that is thrown with the exception.) It will thus be necessary for
343 __throw to be able to determine if a given user-defined
344 exception handler will actually be executed, given the type of
345 exception.
346
347 One scheme is to add additional information to exception_table_list
348 as to the types of exceptions accepted by each handler. __throw
349 can do the type comparisons and then determine if the handler is
350 actually going to be executed.
351
352 There is currently no significant level of debugging support
353 available, other than to place a breakpoint on __throw. While
354 this is sufficient in most cases, it would be helpful to be able to
355 know where a given exception was going to be thrown to before it is
356 actually thrown, and to be able to choose between stopping before
357 every exception region (including cleanups), or just user-defined
358 exception regions. This should be possible to do in the two-pass
359 scheme by adding additional labels to __throw for appropriate
360 breakpoints, and additional debugger commands could be added to
361 query various state variables to determine what actions are to be
362 performed next.
363
364 Another major problem that is being worked on is the issue with stack
365 unwinding on various platforms. Currently the only platforms that have
366 support for the generation of a generic unwinder are the SPARC and MIPS.
367 All other ports require per-function unwinders, which produce large
368 amounts of code bloat.
369
370 For setjmp/longjmp based exception handling, some of the details
371 are as above, but there are some additional details. This section
372 discusses the details.
373
374 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
375 optimize EH regions yet. We don't have to worry about machine
376 specific issues with unwinding the stack, as we rely upon longjmp
377 for all the machine specific details. There is no variable context
378 of a throw, just the one implied by the dynamic handler stack
379 pointed to by the dynamic handler chain. There is no exception
380 table, and no calls to __register_exceptions. __sjthrow is used
381 instead of __throw, and it works by using the dynamic handler
382 chain, and longjmp. -fasynchronous-exceptions has no effect, as
383 the elimination of trivial exception regions is not yet performed.
384
385 A frontend can set protect_cleanup_actions_with_terminate when all
386 the cleanup actions should be protected with an EH region that
387 calls terminate when an unhandled exception is thrown. C++ does
388 this, Ada does not. */
389
390
391 #include "config.h"
392 #include "defaults.h"
393 #include "eh-common.h"
394 #include "system.h"
395 #include "rtl.h"
396 #include "tree.h"
397 #include "flags.h"
398 #include "except.h"
399 #include "function.h"
400 #include "insn-flags.h"
401 #include "expr.h"
402 #include "insn-codes.h"
403 #include "regs.h"
404 #include "hard-reg-set.h"
405 #include "insn-config.h"
406 #include "recog.h"
407 #include "output.h"
408 #include "toplev.h"
409
410 /* One to use the setjmp/longjmp method of generating code for exception
411 handling. */
412
413 int exceptions_via_longjmp = 2;
414
415 /* One to enable asynchronous exception support. */
416
417 int asynchronous_exceptions = 0;
418
419 /* One to protect cleanup actions with a handler that calls
420 __terminate, zero otherwise. */
421
422 int protect_cleanup_actions_with_terminate;
423
424 /* A list of labels used for exception handlers. Created by
425 find_exception_handler_labels for the optimization passes. */
426
427 rtx exception_handler_labels;
428
429 /* The EH context. Nonzero if the function has already
430 fetched a pointer to the EH context for exception handling. */
431
432 rtx current_function_ehc;
433
434 /* A stack used for keeping track of the currently active exception
435 handling region. As each exception region is started, an entry
436 describing the region is pushed onto this stack. The current
437 region can be found by looking at the top of the stack, and as we
438 exit regions, the corresponding entries are popped.
439
440 Entries cannot overlap; they can be nested. So there is only one
441 entry at most that corresponds to the current instruction, and that
442 is the entry on the top of the stack. */
443
444 static struct eh_stack ehstack;
445
446
447 /* This stack is used to represent what the current eh region is
448 for the catch blocks being processed. */
449
450 static struct eh_stack catchstack;
451
452 /* A queue used for tracking which exception regions have closed but
453 whose handlers have not yet been expanded. Regions are emitted in
454 groups in an attempt to improve paging performance.
455
456 As we exit a region, we enqueue a new entry. The entries are then
457 dequeued during expand_leftover_cleanups and expand_start_all_catch.
458
459 We should redo things so that we either take RTL for the handler,
460 or we expand the handler expressed as a tree immediately at region
461 end time. */
462
463 static struct eh_queue ehqueue;
464
465 /* Insns for all of the exception handlers for the current function.
466 They are currently emitted by the frontend code. */
467
468 rtx catch_clauses;
469
470 /* A TREE_CHAINed list of handlers for regions that are not yet
471 closed. The TREE_VALUE of each entry contains the handler for the
472 corresponding entry on the ehstack. */
473
474 static tree protect_list;
475
476 /* Stacks to keep track of various labels. */
477
478 /* Keeps track of the label to resume to should one want to resume
479 normal control flow out of a handler (instead of, say, returning to
480 the caller of the current function or exiting the program). */
481
482 struct label_node *caught_return_label_stack = NULL;
483
484 /* Keeps track of the label used as the context of a throw to rethrow an
485 exception to the outer exception region. */
486
487 struct label_node *outer_context_label_stack = NULL;
488
489 /* A random data area for the front end's own use. */
490
491 struct label_node *false_label_stack = NULL;
492
493 static void push_eh_entry PROTO((struct eh_stack *));
494 static struct eh_entry * pop_eh_entry PROTO((struct eh_stack *));
495 static void enqueue_eh_entry PROTO((struct eh_queue *, struct eh_entry *));
496 static struct eh_entry * dequeue_eh_entry PROTO((struct eh_queue *));
497 static rtx call_get_eh_context PROTO((void));
498 static void start_dynamic_cleanup PROTO((tree, tree));
499 static void start_dynamic_handler PROTO((void));
500 static void expand_rethrow PROTO((rtx));
501 static void output_exception_table_entry PROTO((FILE *, int));
502 static int can_throw PROTO((rtx));
503 static rtx scan_region PROTO((rtx, int, int *));
504 static void eh_regs PROTO((rtx *, rtx *, int));
505 static void set_insn_eh_region PROTO((rtx *, int));
506 #ifdef DONT_USE_BUILTIN_SETJMP
507 static void jumpif_rtx PROTO((rtx, rtx));
508 #endif
509
510
511 rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
512 \f
513 /* Various support routines to manipulate the various data structures
514 used by the exception handling code. */
515
516 /* Push a label entry onto the given STACK. */
517
518 void
519 push_label_entry (stack, rlabel, tlabel)
520 struct label_node **stack;
521 rtx rlabel;
522 tree tlabel;
523 {
524 struct label_node *newnode
525 = (struct label_node *) xmalloc (sizeof (struct label_node));
526
527 if (rlabel)
528 newnode->u.rlabel = rlabel;
529 else
530 newnode->u.tlabel = tlabel;
531 newnode->chain = *stack;
532 *stack = newnode;
533 }
534
535 /* Pop a label entry from the given STACK. */
536
537 rtx
538 pop_label_entry (stack)
539 struct label_node **stack;
540 {
541 rtx label;
542 struct label_node *tempnode;
543
544 if (! *stack)
545 return NULL_RTX;
546
547 tempnode = *stack;
548 label = tempnode->u.rlabel;
549 *stack = (*stack)->chain;
550 free (tempnode);
551
552 return label;
553 }
554
555 /* Return the top element of the given STACK. */
556
557 tree
558 top_label_entry (stack)
559 struct label_node **stack;
560 {
561 if (! *stack)
562 return NULL_TREE;
563
564 return (*stack)->u.tlabel;
565 }
566
567 /* Get an exception label. These must be on the permanent obstack. */
568
569 rtx
570 gen_exception_label ()
571 {
572 rtx lab;
573
574 push_obstacks_nochange ();
575 end_temporary_allocation ();
576 lab = gen_label_rtx ();
577 pop_obstacks ();
578 return lab;
579 }
580
581 /* Push a new eh_node entry onto STACK. */
582
583 static void
584 push_eh_entry (stack)
585 struct eh_stack *stack;
586 {
587 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
588 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
589
590 entry->outer_context = gen_label_rtx ();
591 entry->finalization = NULL_TREE;
592 entry->label_used = 0;
593 entry->exception_handler_label = gen_exception_label ();
594
595 node->entry = entry;
596 node->chain = stack->top;
597 stack->top = node;
598 }
599
600 /* Push an existing entry onto a stack. */
601 static void
602 push_entry (stack, entry)
603 struct eh_stack *stack;
604 struct eh_entry *entry;
605 {
606 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
607 node->entry = entry;
608 node->chain = stack->top;
609 stack->top = node;
610 }
611
612 /* Pop an entry from the given STACK. */
613
614 static struct eh_entry *
615 pop_eh_entry (stack)
616 struct eh_stack *stack;
617 {
618 struct eh_node *tempnode;
619 struct eh_entry *tempentry;
620
621 tempnode = stack->top;
622 tempentry = tempnode->entry;
623 stack->top = stack->top->chain;
624 free (tempnode);
625
626 return tempentry;
627 }
628
629 /* Enqueue an ENTRY onto the given QUEUE. */
630
631 static void
632 enqueue_eh_entry (queue, entry)
633 struct eh_queue *queue;
634 struct eh_entry *entry;
635 {
636 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
637
638 node->entry = entry;
639 node->chain = NULL;
640
641 if (queue->head == NULL)
642 {
643 queue->head = node;
644 }
645 else
646 {
647 queue->tail->chain = node;
648 }
649 queue->tail = node;
650 }
651
652 /* Dequeue an entry from the given QUEUE. */
653
654 static struct eh_entry *
655 dequeue_eh_entry (queue)
656 struct eh_queue *queue;
657 {
658 struct eh_node *tempnode;
659 struct eh_entry *tempentry;
660
661 if (queue->head == NULL)
662 return NULL;
663
664 tempnode = queue->head;
665 queue->head = queue->head->chain;
666
667 tempentry = tempnode->entry;
668 free (tempnode);
669
670 return tempentry;
671 }
672
673 static void
674 receive_exception_label (handler_label)
675 rtx handler_label;
676 {
677 emit_label (handler_label);
678
679 #ifdef HAVE_exception_receiver
680 if (! exceptions_via_longjmp)
681 if (HAVE_exception_receiver)
682 emit_insn (gen_exception_receiver ());
683 #endif
684
685 #ifdef HAVE_nonlocal_goto_receiver
686 if (! exceptions_via_longjmp)
687 if (HAVE_nonlocal_goto_receiver)
688 emit_insn (gen_nonlocal_goto_receiver ());
689 #endif
690 }
691
692
693 struct func_eh_entry
694 {
695 int range_number; /* EH region number from EH NOTE insns. */
696 struct handler_info *handlers;
697 };
698
699
700 /* Table of the function's EH regions. */
701 static struct func_eh_entry *function_eh_regions = NULL;
702 static int num_func_eh_entries = 0;
703 static int current_func_eh_entry = 0;
704
705 #define SIZE_FUNC_EH(X) (sizeof (struct func_eh_entry) * (X))
706
707 /* Add a new eh_entry for this function, keyed by the EH region number
708 NOTE_EH_REGION taken from the region's EH NOTE insns. The number
709 returned uniquely identifies this exception range. */
710
711 int
712 new_eh_region_entry (note_eh_region)
713 int note_eh_region;
714 {
715 if (current_func_eh_entry == num_func_eh_entries)
716 {
717 if (num_func_eh_entries == 0)
718 {
719 function_eh_regions =
720 (struct func_eh_entry *) malloc (SIZE_FUNC_EH (50));
721 num_func_eh_entries = 50;
722 }
723 else
724 {
725 num_func_eh_entries = num_func_eh_entries * 3 / 2;
726 function_eh_regions = (struct func_eh_entry *)
727 realloc (function_eh_regions, SIZE_FUNC_EH (num_func_eh_entries));
728 }
729 }
730 function_eh_regions[current_func_eh_entry].range_number = note_eh_region;
731 function_eh_regions[current_func_eh_entry].handlers = NULL;
732
733 return current_func_eh_entry++;
734 }
735
736 /* Add new handler information to an exception range. The first parameter
737 specifies the range number (returned from new_eh_region_entry()). The second
738 parameter specifies the handler. By default the handler is inserted at
739 the end of the list. A handler list may contain only ONE NULL_TREE
740 typeinfo entry. Regardless of where it is positioned, a NULL_TREE entry
741 is always output as the LAST handler in the exception table for a region. */
742
743 void
744 add_new_handler (region, newhandler)
745 int region;
746 struct handler_info *newhandler;
747 {
748 struct handler_info *last;
749
750 newhandler->next = NULL;
751 last = function_eh_regions[region].handlers;
752 if (last == NULL)
753 function_eh_regions[region].handlers = newhandler;
754 else
755 {
756 for ( ; last->next != NULL; last = last->next)
757 ;
758 last->next = newhandler;
759 }
760 }
761
762 /* Remove a handler label. The handler label is being deleted, so all
763 regions which reference this handler should have it removed from their
764 list of possible handlers. Any region which has the final handler
765 removed can be deleted. */
766
767 void remove_handler (removing_label)
768 rtx removing_label;
769 {
770 struct handler_info *handler, *last;
771 int x;
772 for (x = 0 ; x < current_func_eh_entry; ++x)
773 {
774 last = NULL;
775 handler = function_eh_regions[x].handlers;
776 for ( ; handler; last = handler, handler = handler->next)
777 if (handler->handler_label == removing_label)
778 {
779 if (last)
780 {
781 last->next = handler->next;
782 handler = last;
783 }
784 else
785 function_eh_regions[x].handlers = handler->next;
786 }
787 }
788 }
789
790 /* Fill *ARRAY with a malloc'd array of the void pointers representing
791 the distinct runtime type match values that currently exist in all
792 regions, and return the number of entries in the array. */
793
794 int
795 find_all_handler_type_matches (void ***array)
796 {
797 struct handler_info *handler, *last;
798 int x,y;
799 void *val;
800 void **ptr;
801 int max_ptr;
802 int n_ptr = 0;
803
804 *array = NULL;
805
806 if (!doing_eh (0) || ! flag_new_exceptions)
807 return 0;
808
809 max_ptr = 100;
810 ptr = (void **)malloc (max_ptr * sizeof (void *));
811
812 if (ptr == NULL)
813 return 0;
814
815 for (x = 0 ; x < current_func_eh_entry; x++)
816 {
817 last = NULL;
818 handler = function_eh_regions[x].handlers;
819 for ( ; handler; last = handler, handler = handler->next)
820 {
821 val = handler->type_info;
822 if (val != NULL && val != CATCH_ALL_TYPE)
823 {
824 /* See if this match value has already been found. */
825 for (y = 0; y < n_ptr; y++)
826 if (ptr[y] == val)
827 break;
828
829 /* If we break early, we already found this value. */
830 if (y < n_ptr)
831 continue;
832
833 /* Do we need to allocate more space? */
834 if (n_ptr >= max_ptr)
835 {
836 max_ptr += max_ptr / 2;
837 ptr = (void **)realloc (ptr, max_ptr * sizeof (void *));
838 if (ptr == NULL)
839 return 0;
840 }
841 ptr[n_ptr] = val;
842 n_ptr++;
843 }
844 }
845 }
846 *array = ptr;
847 return n_ptr;
848 }
849
850 /* Create a new handler structure initialized with the handler label and
851 typeinfo fields passed in. */
852
853 struct handler_info *
854 get_new_handler (handler, typeinfo)
855 rtx handler;
856 void *typeinfo;
857 {
858 struct handler_info* ptr;
859 ptr = (struct handler_info *) malloc (sizeof (struct handler_info));
860 ptr->handler_label = handler;
861 ptr->type_info = typeinfo;
862 ptr->next = NULL;
863
864 return ptr;
865 }
866
867
868
869 /* Find the index in function_eh_regions associated with a NOTE region. If
870 the region cannot be found, -1 is returned. This should never happen! */
871
872 int
873 find_func_region (insn_region)
874 int insn_region;
875 {
876 int x;
877 for (x = 0; x < current_func_eh_entry; x++)
878 if (function_eh_regions[x].range_number == insn_region)
879 return x;
880
881 return -1;
882 }
883
884 /* Get a pointer to the first handler in an exception region's list. */
885
886 struct handler_info *
887 get_first_handler (region)
888 int region;
889 {
890 return function_eh_regions[find_func_region (region)].handlers;
891 }
892
893 /* Clean out the function_eh_regions table and free all of its memory. */
894
895 static void
896 clear_function_eh_region ()
897 {
898 int x;
899 struct handler_info *ptr, *next;
900 for (x = 0; x < current_func_eh_entry; x++)
901 for (ptr = function_eh_regions[x].handlers; ptr != NULL; ptr = next)
902 {
903 next = ptr->next;
904 free (ptr);
905 }
906 free (function_eh_regions);
907 num_func_eh_entries = 0;
908 current_func_eh_entry = 0;
909 }
910
911 /* Make a duplicate of an exception region by copying all of its handlers
912 to a new region. Return the index of the new region. */
913
914 int
915 duplicate_handlers (old_note_eh_region, new_note_eh_region)
916 int old_note_eh_region, new_note_eh_region;
917 {
918 struct handler_info *ptr, *new_ptr;
919 int new_region, region;
920
921 region = find_func_region (old_note_eh_region);
922 if (region == -1)
923 error ("Cannot duplicate non-existant exception region.");
924
925 if (find_func_region (new_note_eh_region) != -1)
926 error ("Cannot duplicate EH region because new note region already exists");
927
928 new_region = new_eh_region_entry (new_note_eh_region);
929 ptr = function_eh_regions[region].handlers;
930
931 for ( ; ptr; ptr = ptr->next)
932 {
933 new_ptr = get_new_handler (ptr->handler_label, ptr->type_info);
934 add_new_handler (new_region, new_ptr);
935 }
936
937 return new_region;
938 }
939
940 \f
941 /* Routine to see if exception handling is turned on.
942 DO_WARN is non-zero if we want to inform the user that exception
943 handling is turned off.
944
945 This is used to ensure that -fexceptions has been specified if the
946 compiler tries to use any exception-specific functions. */
947
948 int
949 doing_eh (do_warn)
950 int do_warn;
951 {
952 if (! flag_exceptions)
953 {
954 static int warned = 0;
955 if (! warned && do_warn)
956 {
957 error ("exception handling disabled, use -fexceptions to enable");
958 warned = 1;
959 }
960 return 0;
961 }
962 return 1;
963 }
964
965 /* Given a return address in ADDR, determine the address we should use
966 to find the corresponding EH region. */
967
968 rtx
969 eh_outer_context (addr)
970 rtx addr;
971 {
972 /* First mask out any unwanted bits. */
973 #ifdef MASK_RETURN_ADDR
974 expand_and (addr, MASK_RETURN_ADDR, addr);
975 #endif
976
977 /* Then adjust to find the real return address. */
978 #if defined (RETURN_ADDR_OFFSET)
979 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
980 #endif
981
982 return addr;
983 }
984
985 /* Start a new exception region for a region of code that has a
986 cleanup action and push the HANDLER for the region onto
987 protect_list. All of the regions created with add_partial_entry
988 will be ended when end_protect_partials is invoked. */
989
990 void
991 add_partial_entry (handler)
992 tree handler;
993 {
994 expand_eh_region_start ();
995
996 /* Make sure the entry is on the correct obstack. */
997 push_obstacks_nochange ();
998 resume_temporary_allocation ();
999
1000 /* Because this is a cleanup action, we may have to protect the handler
1001 with __terminate. */
1002 handler = protect_with_terminate (handler);
1003
1004 protect_list = tree_cons (NULL_TREE, handler, protect_list);
1005 pop_obstacks ();
1006 }
1007
1008 /* Emit code to fetch the EH context for the current function. */
1009
1010 static rtx
1011 call_get_eh_context ()
1012 {
1013 static tree fn;
1014 tree expr;
1015
1016 if (fn == NULL_TREE)
1017 {
1018 tree fntype;
1019 fn = get_identifier ("__get_eh_context");
1020 push_obstacks_nochange ();
1021 end_temporary_allocation ();
1022 fntype = build_pointer_type (build_pointer_type
1023 (build_pointer_type (void_type_node)));
1024 fntype = build_function_type (fntype, NULL_TREE);
1025 fn = build_decl (FUNCTION_DECL, fn, fntype);
1026 DECL_EXTERNAL (fn) = 1;
1027 TREE_PUBLIC (fn) = 1;
1028 DECL_ARTIFICIAL (fn) = 1;
1029 TREE_READONLY (fn) = 1;
1030 make_decl_rtl (fn, NULL_PTR, 1);
1031 assemble_external (fn);
1032 pop_obstacks ();
1033 }
1034
1035 expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1036 expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1037 expr, NULL_TREE, NULL_TREE);
1038 TREE_SIDE_EFFECTS (expr) = 1;
1039
1040 return copy_to_reg (expand_expr (expr, NULL_RTX, VOIDmode, 0));
1041 }
1042
1043 /* Get a reference to the EH context.
1044 We will only generate a register for the current function's EH context
1045 here, and emit a USE insn to mark that this is an EH context register.
1046
1047 Later, emit_eh_context will emit the needed call to __get_eh_context
1048 in libgcc2, and copy the value into the register we have generated. */
1049
1050 rtx
1051 get_eh_context ()
1052 {
1053 if (current_function_ehc == 0)
1054 {
1055 rtx insn;
1056
1057 current_function_ehc = gen_reg_rtx (Pmode);
1058
1059 insn = gen_rtx_USE (GET_MODE (current_function_ehc),
1060 current_function_ehc);
1061 insn = emit_insn_before (insn, get_first_nonparm_insn ());
1062
1063 REG_NOTES (insn)
1064 = gen_rtx_EXPR_LIST (REG_EH_CONTEXT, current_function_ehc,
1065 REG_NOTES (insn));
1066 }
1067 return current_function_ehc;
1068 }
1069
1070 /* Get a reference to the dynamic handler chain. It points to the
1071 pointer to the next element in the dynamic handler chain. It ends
1072 when there are no more elements in the dynamic handler chain, when
1073 the value is &top_elt from libgcc2.c. Immediately after the
1074 pointer is an area suitable for setjmp/longjmp when
1075 DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
1076 __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
1077 isn't defined. */
1078
1079 rtx
1080 get_dynamic_handler_chain ()
1081 {
1082 rtx ehc, dhc, result;
1083
1084 ehc = get_eh_context ();
1085
1086 /* This is the offset of dynamic_handler_chain in the eh_context struct
1087 declared in eh-common.h. If its location is changed, change this offset. */
1088 dhc = plus_constant (ehc, POINTER_SIZE / BITS_PER_UNIT);
1089
1090 result = copy_to_reg (dhc);
1091
1092 /* We don't want a copy of the dhc, but rather, the single dhc. */
1093 return gen_rtx_MEM (Pmode, result);
1094 }
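
/* For illustration only: as described above, each element on the dynamic
   handler chain can be pictured roughly as the record below. This is a
   sketch, not the actual layout used by the runtime; see
   start_dynamic_handler for the code that builds such an element on the
   stack, and eh-common.h/libgcc2.c for the real definitions.

	struct dynamic_handler_sketch
	{
	  struct dynamic_handler_sketch *next;   -- outer handler in the chain
	  void *cleanup_chain;                   -- head of the dynamic cleanup chain
	  void *jmpbuf[5];                       -- setjmp-style buffer; the size
	                                            is target dependent
	};  */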
1095
1096 /* Get a reference to the dynamic cleanup chain. It points to the
1097 pointer to the next element in the dynamic cleanup chain.
1098 Immediately after the pointer, are two Pmode variables, one for a
1099 pointer to a function that performs the cleanup action, and the
1100 second, the argument to pass to that function. */
1101
1102 rtx
1103 get_dynamic_cleanup_chain ()
1104 {
1105 rtx dhc, dcc, result;
1106
1107 dhc = get_dynamic_handler_chain ();
1108 dcc = plus_constant (dhc, POINTER_SIZE / BITS_PER_UNIT);
1109
1110 result = copy_to_reg (dcc);
1111
1112 /* We don't want a copy of the dcc, but rather, the single dcc. */
1113 return gen_rtx_MEM (Pmode, result);
1114 }
1115
1116 #ifdef DONT_USE_BUILTIN_SETJMP
1117 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
1118 LABEL is an rtx of code CODE_LABEL, in this function. */
1119
1120 static void
1121 jumpif_rtx (x, label)
1122 rtx x;
1123 rtx label;
1124 {
1125 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
1126 }
1127 #endif
1128
1129 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
1130 We just need to create an element for the cleanup list, and push it
1131 into the chain.
1132
1133 A dynamic cleanup is a cleanup action implied by the presence of an
1134 element on the EH runtime dynamic cleanup stack that is to be
1135 performed when an exception is thrown. The cleanup action is
1136 performed by __sjthrow when an exception is thrown. Only certain
1137 actions can be optimized into dynamic cleanup actions. For the
1138 restrictions on what actions can be performed using this routine,
1139 see expand_eh_region_start_tree. */
1140
1141 static void
1142 start_dynamic_cleanup (func, arg)
1143 tree func;
1144 tree arg;
1145 {
1146 rtx dcc;
1147 rtx new_func, new_arg;
1148 rtx x, buf;
1149 int size;
1150
1151 /* We allocate enough room for a pointer to the function, and
1152 one argument. */
1153 size = 2;
1154
1155 /* XXX, FIXME: The stack space allocated this way is too long lived,
1156 but there is no allocation routine that allocates at the level of
1157 the last binding contour. */
1158 buf = assign_stack_local (BLKmode,
1159 GET_MODE_SIZE (Pmode)*(size+1),
1160 0);
1161
1162 buf = change_address (buf, Pmode, NULL_RTX);
1163
1164 /* Store dcc into the first word of the newly allocated buffer. */
1165
1166 dcc = get_dynamic_cleanup_chain ();
1167 emit_move_insn (buf, dcc);
1168
1169 /* Store func and arg into the cleanup list element. */
1170
1171 new_func = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
1172 GET_MODE_SIZE (Pmode)));
1173 new_arg = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
1174 GET_MODE_SIZE (Pmode)*2));
1175 x = expand_expr (func, new_func, Pmode, 0);
1176 if (x != new_func)
1177 emit_move_insn (new_func, x);
1178
1179 x = expand_expr (arg, new_arg, Pmode, 0);
1180 if (x != new_arg)
1181 emit_move_insn (new_arg, x);
1182
1183 /* Update the cleanup chain. */
1184
1185 emit_move_insn (dcc, XEXP (buf, 0));
1186 }
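
/* For illustration only: the cleanup list element built above can be
   pictured roughly as the record below (a sketch, not an actual
   declaration; the cleanup function's exact signature is up to the
   runtime and front end):

	struct dynamic_cleanup_sketch
	{
	  struct dynamic_cleanup_sketch *next;   -- previous head of the chain
	  void (*func) (void *);                 -- function performing the cleanup
	  void *arg;                             -- argument passed to FUNC
	};  */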
1187
1188 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
1189 handler stack. This should only be used by expand_eh_region_start
1190 or expand_eh_region_start_tree. */
1191
1192 static void
1193 start_dynamic_handler ()
1194 {
1195 rtx dhc, dcc;
1196 rtx x, arg, buf;
1197 int size;
1198
1199 #ifndef DONT_USE_BUILTIN_SETJMP
1200 /* The number of Pmode words for the setjmp buffer, when using the
1201 builtin setjmp/longjmp, see expand_builtin, case
1202 BUILT_IN_LONGJMP. */
1203 size = 5;
1204 #else
1205 #ifdef JMP_BUF_SIZE
1206 size = JMP_BUF_SIZE;
1207 #else
1208 /* Should be large enough for most systems, if it is not,
1209 JMP_BUF_SIZE should be defined with the proper value. It will
1210 also tend to be larger than necessary for most systems, a more
1211 optimal port will define JMP_BUF_SIZE. */
1212 size = FIRST_PSEUDO_REGISTER+2;
1213 #endif
1214 #endif
1215 /* XXX, FIXME: The stack space allocated this way is too long lived,
1216 but there is no allocation routine that allocates at the level of
1217 the last binding contour. */
1218 arg = assign_stack_local (BLKmode,
1219 GET_MODE_SIZE (Pmode)*(size+1),
1220 0);
1221
1222 arg = change_address (arg, Pmode, NULL_RTX);
1223
1224 /* Store dhc into the first word of the newly allocated buffer. */
1225
1226 dhc = get_dynamic_handler_chain ();
1227 dcc = gen_rtx_MEM (Pmode, plus_constant (XEXP (arg, 0),
1228 GET_MODE_SIZE (Pmode)));
1229 emit_move_insn (arg, dhc);
1230
1231 /* Zero out the start of the cleanup chain. */
1232 emit_move_insn (dcc, const0_rtx);
1233
1234 /* The jmpbuf starts two words into the area allocated. */
1235 buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
1236
1237 #ifdef DONT_USE_BUILTIN_SETJMP
1238 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
1239 buf, Pmode);
1240 /* If we come back here for a catch, transfer control to the handler. */
1241 jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
1242 #else
1243 {
1244 /* A label to continue execution for the no exception case. */
1245 rtx noex = gen_label_rtx();
1246 x = expand_builtin_setjmp (buf, NULL_RTX, noex,
1247 ehstack.top->entry->exception_handler_label);
1248 emit_label (noex);
1249 }
1250 #endif
1251
1252 /* We are committed to this, so update the handler chain. */
1253
1254 emit_move_insn (dhc, XEXP (arg, 0));
1255 }
1256
1257 /* Start an exception handling region for the given cleanup action.
1258 All instructions emitted after this point are considered to be part
1259 of the region until expand_eh_region_end is invoked. CLEANUP is
1260 the cleanup action to perform. The return value is true if the
1261 exception region was optimized away. In that case,
1262 expand_eh_region_end does not need to be called for this cleanup,
1263 nor should it be.
1264
1265 This routine notices one particular common case in C++ code
1266 generation, and optimizes it so as to not need the exception
1267 region. It works by creating a dynamic cleanup action, instead of
1268 using an exception region. */
1269
1270 int
1271 expand_eh_region_start_tree (decl, cleanup)
1272 tree decl;
1273 tree cleanup;
1274 {
1275 /* This is the old code. */
1276 if (! doing_eh (0))
1277 return 0;
1278
1279 /* The optimization only applies to actions protected with
1280 terminate, and only applies if we are using the setjmp/longjmp
1281 codegen method. */
1282 if (exceptions_via_longjmp
1283 && protect_cleanup_actions_with_terminate)
1284 {
1285 tree func, arg;
1286 tree args;
1287
1288 /* Ignore any UNSAVE_EXPR. */
1289 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
1290 cleanup = TREE_OPERAND (cleanup, 0);
1291
1292 /* Further, it only applies if the action is a call, if there
1293 are 2 arguments, and if the second argument is 2. */
1294
1295 if (TREE_CODE (cleanup) == CALL_EXPR
1296 && (args = TREE_OPERAND (cleanup, 1))
1297 && (func = TREE_OPERAND (cleanup, 0))
1298 && (arg = TREE_VALUE (args))
1299 && (args = TREE_CHAIN (args))
1300
1301 /* is the second argument 2? */
1302 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
1303 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
1304 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
1305
1306 /* Make sure there are no other arguments. */
1307 && TREE_CHAIN (args) == NULL_TREE)
1308 {
1309 /* Arrange for returns and gotos to pop the entry we make on the
1310 dynamic cleanup stack. */
1311 expand_dcc_cleanup (decl);
1312 start_dynamic_cleanup (func, arg);
1313 return 1;
1314 }
1315 }
1316
1317 expand_eh_region_start_for_decl (decl);
1318 ehstack.top->entry->finalization = cleanup;
1319
1320 return 0;
1321 }
1322
1323 /* Just like expand_eh_region_start, except if a cleanup action is
1324 entered on the cleanup chain, the TREE_PURPOSE of the element put
1325 on the chain is DECL. DECL should be the associated VAR_DECL, if
1326 any, otherwise it should be NULL_TREE. */
1327
1328 void
1329 expand_eh_region_start_for_decl (decl)
1330 tree decl;
1331 {
1332 rtx note;
1333
1334 /* This is the old code. */
1335 if (! doing_eh (0))
1336 return;
1337
1338 if (exceptions_via_longjmp)
1339 {
1340 /* We need a new block to record the start and end of the
1341 dynamic handler chain. We could always do this, but we
1342 really want to permit jumping into such a block, and we want
1343 to avoid any errors or performance impact in the SJ EH code
1344 for now. */
1345 expand_start_bindings (0);
1346
1347 /* But we don't need or want a new temporary level. */
1348 pop_temp_slots ();
1349
1350 /* Mark this block as created by expand_eh_region_start. This
1351 is so that we can pop the block with expand_end_bindings
1352 automatically. */
1353 mark_block_as_eh_region ();
1354
1355 /* Arrange for returns and gotos to pop the entry we make on the
1356 dynamic handler stack. */
1357 expand_dhc_cleanup (decl);
1358 }
1359
1360 push_eh_entry (&ehstack);
1361 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
1362 NOTE_BLOCK_NUMBER (note)
1363 = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
1364 if (exceptions_via_longjmp)
1365 start_dynamic_handler ();
1366 }
1367
1368 /* Start an exception handling region. All instructions emitted after
1369 this point are considered to be part of the region until
1370 expand_eh_region_end is invoked. */
1371
1372 void
1373 expand_eh_region_start ()
1374 {
1375 expand_eh_region_start_for_decl (NULL_TREE);
1376 }
1377
1378 /* End an exception handling region. The information about the region
1379 is found on the top of ehstack.
1380
1381 HANDLER is either the cleanup for the exception region, or if we're
1382 marking the end of a try block, HANDLER is integer_zero_node.
1383
1384 HANDLER will be transformed to rtl when expand_leftover_cleanups
1385 is invoked. */
1386
1387 void
1388 expand_eh_region_end (handler)
1389 tree handler;
1390 {
1391 struct eh_entry *entry;
1392 rtx note;
1393
1394 if (! doing_eh (0))
1395 return;
1396
1397 entry = pop_eh_entry (&ehstack);
1398
1399 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
1400 NOTE_BLOCK_NUMBER (note)
1401 = CODE_LABEL_NUMBER (entry->exception_handler_label);
1402 if (exceptions_via_longjmp == 0
1403 /* We share outer_context between regions; only emit it once. */
1404 && INSN_UID (entry->outer_context) == 0)
1405 {
1406 rtx label;
1407
1408 label = gen_label_rtx ();
1409 emit_jump (label);
1410
1411 /* Emit a label marking the end of this exception region that
1412 is used for rethrowing into the outer context. */
1413 emit_label (entry->outer_context);
1414 expand_internal_throw ();
1415
1416 emit_label (label);
1417 }
1418
1419 entry->finalization = handler;
1420
1421 /* create region entry in final exception table */
1422 new_eh_region_entry (NOTE_BLOCK_NUMBER (note));
1423
1424 enqueue_eh_entry (&ehqueue, entry);
1425
1426 /* If we have already started ending the bindings, don't recurse.
1427 This only happens when exceptions_via_longjmp is true. */
1428 if (is_eh_region ())
1429 {
1430 /* Because we don't need or want a new temporary level and
1431 because we didn't create one in expand_eh_region_start,
1432 create a fake one now to avoid removing one in
1433 expand_end_bindings. */
1434 push_temp_slots ();
1435
1436 mark_block_as_not_eh_region ();
1437
1438 /* Maybe do this to prevent jumping in and so on... */
1439 expand_end_bindings (NULL_TREE, 0, 0);
1440 }
1441 }
1442
1443 /* Start the EH region for a goto fixup. We only need these regions in the
1444 region-based EH scheme. */
1445
1446 void
1447 expand_fixup_region_start ()
1448 {
1449 if (! doing_eh (0) || exceptions_via_longjmp)
1450 return;
1451
1452 expand_eh_region_start ();
1453 }
1454
1455 /* End the EH region for a goto fixup. CLEANUP is the cleanup we just
1456 expanded; to avoid running it twice if it throws, we look through the
1457 ehqueue for a matching region and rethrow from its outer_context. */
1458
1459 void
1460 expand_fixup_region_end (cleanup)
1461 tree cleanup;
1462 {
1463 struct eh_node *node;
1464
1465 if (! doing_eh (0) || exceptions_via_longjmp)
1466 return;
1467
1468 for (node = ehstack.top; node && node->entry->finalization != cleanup; )
1469 node = node->chain;
1470 if (node == 0)
1471 for (node = ehqueue.head; node && node->entry->finalization != cleanup; )
1472 node = node->chain;
1473 if (node == 0)
1474 abort ();
1475
1476 ehstack.top->entry->outer_context = node->entry->outer_context;
1477
1478 /* Just rethrow. size_zero_node is just a NOP. */
1479 expand_eh_region_end (size_zero_node);
1480 }
1481
1482 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1483 call to __sjthrow.
1484
1485 Otherwise, we emit a call to __throw and note that we threw
1486 something, so we know we need to generate the necessary code for
1487 __throw.
1488
1489 Before invoking __throw, the __eh_pc variable must have been set up
1490 to contain the PC being thrown from. This address is used by
1491 __throw to determine which exception region (if any) is
1492 responsible for handling the exception. */
1493
1494 void
1495 emit_throw ()
1496 {
1497 if (exceptions_via_longjmp)
1498 {
1499 emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
1500 }
1501 else
1502 {
1503 #ifdef JUMP_TO_THROW
1504 emit_indirect_jump (throw_libfunc);
1505 #else
1506 emit_library_call (throw_libfunc, 0, VOIDmode, 0);
1507 #endif
1508 }
1509 emit_barrier ();
1510 }
1511
1512 /* Throw the current exception. If appropriate, this is done by jumping
1513 to the next handler. */
1514
1515 void
1516 expand_internal_throw ()
1517 {
1518 emit_throw ();
1519 }
1520
1521 /* Called from expand_exception_blocks and expand_end_catch_block to
1522 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1523
1524 void
1525 expand_leftover_cleanups ()
1526 {
1527 struct eh_entry *entry;
1528
1529 while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
1530 {
1531 rtx prev;
1532
1533 /* A leftover try block. Shouldn't be one here. */
1534 if (entry->finalization == integer_zero_node)
1535 abort ();
1536
1537 /* Output the label for the start of the exception handler. */
1538
1539 receive_exception_label (entry->exception_handler_label);
1540
1541 /* register a handler for this cleanup region */
1542 add_new_handler (
1543 find_func_region (CODE_LABEL_NUMBER (entry->exception_handler_label)),
1544 get_new_handler (entry->exception_handler_label, NULL));
1545
1546 /* And now generate the insns for the handler. */
1547 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1548
1549 prev = get_last_insn ();
1550 if (prev == NULL || GET_CODE (prev) != BARRIER)
1551 /* Emit code to throw to the outer context if we fall off
1552 the end of the handler. */
1553 expand_rethrow (entry->outer_context);
1554
1555 do_pending_stack_adjust ();
1556 free (entry);
1557 }
1558 }
1559
1560 /* Called at the start of a block of try statements. */
1561 void
1562 expand_start_try_stmts ()
1563 {
1564 if (! doing_eh (1))
1565 return;
1566
1567 expand_eh_region_start ();
1568 }
1569
1570 /* Called to begin a catch clause. The parameter is the object which
1571 will be passed to the runtime type check routine. */
1572 void
1573 start_catch_handler (rtime)
1574 tree rtime;
1575 {
1576 rtx handler_label;
1577 int insn_region_num;
1578 int eh_region_entry;
1579
1580 if (! doing_eh (1))
1581 return;
1582
1583 handler_label = catchstack.top->entry->exception_handler_label;
1584 insn_region_num = CODE_LABEL_NUMBER (handler_label);
1585 eh_region_entry = find_func_region (insn_region_num);
1586
1587 /* If we've already issued this label, pick a new one */
1588 if (catchstack.top->entry->label_used)
1589 handler_label = gen_exception_label ();
1590 else
1591 catchstack.top->entry->label_used = 1;
1592
1593 receive_exception_label (handler_label);
1594
1595 add_new_handler (eh_region_entry, get_new_handler (handler_label, rtime));
1596 }
1597
1598 /* Generate RTL for the start of a group of catch clauses.
1599
1600 It is responsible for starting a new instruction sequence for the
1601 instructions in the catch block, and expanding the handlers for the
1602 internally-generated exception regions nested within the try block
1603 corresponding to this catch block. */
1604
1605 void
1606 expand_start_all_catch ()
1607 {
1608 struct eh_entry *entry;
1609 tree label;
1610 rtx outer_context;
1611
1612 if (! doing_eh (1))
1613 return;
1614
1615 outer_context = ehstack.top->entry->outer_context;
1616
1617 /* End the try block. */
1618 expand_eh_region_end (integer_zero_node);
1619
1620 emit_line_note (input_filename, lineno);
1621 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1622
1623 /* The label for the exception handling block that we will save.
1624 This is Lresume in the documentation. */
1625 expand_label (label);
1626
1627 /* Push the label that points to where normal flow is resumed onto
1628 the top of the label stack. */
1629 push_label_entry (&caught_return_label_stack, NULL_RTX, label);
1630
1631 /* Start a new sequence for all the catch blocks. We will add this
1632 to the global sequence catch_clauses when we have completed all
1633 the handlers in this handler-seq. */
1634 start_sequence ();
1635
1636 entry = dequeue_eh_entry (&ehqueue);
1637 for ( ; entry->finalization != integer_zero_node;
1638 entry = dequeue_eh_entry (&ehqueue))
1639 {
1640 rtx prev;
1641
1642 /* Emit the label for the cleanup handler for this region, and
1643 expand the code for the handler.
1644
1645 Note that a catch region is handled as a side-effect here;
1646 for a try block, entry->finalization will contain
1647 integer_zero_node, so no code will be generated in the
1648 expand_expr call below. But, the label for the handler will
1649 still be emitted, so any code emitted after this point will
1650 end up being the handler. */
1651
1652 receive_exception_label (entry->exception_handler_label);
1653
1654 /* register a handler for this cleanup region */
1655 add_new_handler (
1656 find_func_region (CODE_LABEL_NUMBER (entry->exception_handler_label)),
1657 get_new_handler (entry->exception_handler_label, NULL));
1658
1659 /* And now generate the insns for the cleanup handler. */
1660 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1661
1662 prev = get_last_insn ();
1663 if (prev == NULL || GET_CODE (prev) != BARRIER)
1664 /* Code to throw out to outer context when we fall off end
1665 of the handler. We can't do this here for catch blocks,
1666 so it's done in expand_end_all_catch instead. */
1667 expand_rethrow (entry->outer_context);
1668
1669 do_pending_stack_adjust ();
1670 free (entry);
1671 }
1672
1673 /* At this point, all the cleanups are done, and the ehqueue now has
1674 the current exception region at its head. We dequeue it, and put it
1675 on the catch stack. */
1676
1677 push_entry (&catchstack, entry);
1678
1679 /* If we are not doing setjmp/longjmp EH, then because the catch
1680 handlers are reordered out of line, we arrange to rethrow in the outer context. We need to
1681 do this because we are not physically within the region, if any, that
1682 logically contains this catch block. */
1683 if (! exceptions_via_longjmp)
1684 {
1685 expand_eh_region_start ();
1686 ehstack.top->entry->outer_context = outer_context;
1687 }
1688
1689 /* We also have to start the handler if we aren't using the new model. */
1690 if (! flag_new_exceptions)
1691 start_catch_handler (NULL);
1692 }
1693
1694 /* Finish up the catch block. At this point all the insns for the
1695 catch clauses have already been generated, so we only have to add
1696 them to the catch_clauses list. We also want to make sure that if
1697 we fall off the end of the catch clauses that we rethrow to the
1698 outer EH region. */
1699
1700 void
1701 expand_end_all_catch ()
1702 {
1703 rtx new_catch_clause, outer_context = NULL_RTX;
1704 struct eh_entry *entry;
1705
1706 if (! doing_eh (1))
1707 return;
1708
1709 /* Dequeue the current catch clause region. */
1710 entry = pop_eh_entry (&catchstack);
1711 free (entry);
1712
1713 if (! exceptions_via_longjmp)
1714 {
1715 outer_context = ehstack.top->entry->outer_context;
1716
1717 /* Finish the rethrow region. size_zero_node is just a NOP. */
1718 expand_eh_region_end (size_zero_node);
1719 }
1720
1721 /* Code to throw out to outer context, if we fall off end of catch
1722 handlers. This is rethrow (Lresume, same id, same obj) in the
1723 documentation. We use Lresume because we know that it will throw
1724 to the correct context.
1725
1726 In other words, if the catch handler doesn't exit or return, we
1727 do a "throw" (using the address of Lresume as the point being
1728 thrown from) so that the outer EH region can then try to process
1729 the exception. */
1730 expand_rethrow (outer_context);
1731
1732 /* Now we have the complete catch sequence. */
1733 new_catch_clause = get_insns ();
1734 end_sequence ();
1735
1736 /* This level of catch blocks is done, so set up the successful
1737 catch jump label for the next layer of catch blocks. */
1738 pop_label_entry (&caught_return_label_stack);
1739 pop_label_entry (&outer_context_label_stack);
1740
1741 /* Add the new sequence of catches to the main one for this function. */
1742 push_to_sequence (catch_clauses);
1743 emit_insns (new_catch_clause);
1744 catch_clauses = get_insns ();
1745 end_sequence ();
1746
1747 /* Here we fall through into the continuation code. */
1748 }
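/* Taken together, a front end drives the routines above roughly as
   sketched below for a single `try { ... } catch (T t) { ... }`
   construct; any per-clause bookkeeping the front end does between
   catch clauses is omitted, so this is only an outline.  */
#if 0
static void
illustrate_try_catch (tree try_body, tree type_info, tree handler_body)
{
  expand_start_try_stmts ();                 /* open the try region       */
  expand_expr (try_body, const0_rtx, VOIDmode, 0);

  expand_start_all_catch ();                 /* end try, start catch seq  */
  start_catch_handler (type_info);           /* one call per catch clause */
  expand_expr (handler_body, const0_rtx, VOIDmode, 0);

  expand_end_all_catch ();                   /* rethrow on fall-through   */
}
#endif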
1749
1750 /* Rethrow from the outer context LABEL. */
1751
1752 static void
1753 expand_rethrow (label)
1754 rtx label;
1755 {
1756 if (exceptions_via_longjmp)
1757 emit_throw ();
1758 else
1759 emit_jump (label);
1760 }
1761
1762 /* End all the pending exception regions on protect_list. The handlers
1763 will be emitted when expand_leftover_cleanups is invoked. */
1764
1765 void
1766 end_protect_partials ()
1767 {
1768 while (protect_list)
1769 {
1770 expand_eh_region_end (TREE_VALUE (protect_list));
1771 protect_list = TREE_CHAIN (protect_list);
1772 }
1773 }
1774
1775 /* Arrange for __terminate to be called if there is an unhandled throw
1776 from within E. */
1777
1778 tree
1779 protect_with_terminate (e)
1780 tree e;
1781 {
1782 /* We only need to do this when using setjmp/longjmp EH and the
1783 language requires it, as otherwise we protect all of the handlers
1784 at once, if we need to. */
1785 if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
1786 {
1787 tree handler, result;
1788
1789 /* All cleanups must be on the function_obstack. */
1790 push_obstacks_nochange ();
1791 resume_temporary_allocation ();
1792
1793 handler = make_node (RTL_EXPR);
1794 TREE_TYPE (handler) = void_type_node;
1795 RTL_EXPR_RTL (handler) = const0_rtx;
1796 TREE_SIDE_EFFECTS (handler) = 1;
1797 start_sequence_for_rtl_expr (handler);
1798
1799 emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
1800 emit_barrier ();
1801
1802 RTL_EXPR_SEQUENCE (handler) = get_insns ();
1803 end_sequence ();
1804
1805 result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
1806 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
1807 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
1808 TREE_READONLY (result) = TREE_READONLY (e);
1809
1810 pop_obstacks ();
1811
1812 e = result;
1813 }
1814
1815 return e;
1816 }
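/* The tree built above has the shape

     TRY_CATCH_EXPR (TREE_TYPE (e), e, handler)

   where HANDLER is an RTL_EXPR whose insn sequence simply calls
   terminate_libfunc and then hits a barrier.  In C++ terms the result
   behaves roughly like `try { e } catch (...) { terminate (); }`.  */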
1817 \f
1818 /* The exception table that we build that is used for looking up and
1819 dispatching exceptions, the current number of entries, and its
1820 maximum size before we have to extend it.
1821
1822 The number in eh_table is the code label number of the exception
1823 handler for the region. This is added by add_eh_table_entry and
1824 used by output_exception_table_entry. */
1825
1826 static int *eh_table = NULL;
1827 static int eh_table_size = 0;
1828 static int eh_table_max_size = 0;
1829
1830 /* Note the need for an exception table entry for region N. If we
1831 don't need to output an explicit exception table, avoid all of the
1832 extra work.
1833
1834 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1835 (Or NOTE_INSN_EH_REGION_END sometimes)
1836 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1837 label number of the exception handler for the region. */
1838
1839 void
1840 add_eh_table_entry (n)
1841 int n;
1842 {
1843 #ifndef OMIT_EH_TABLE
1844 if (eh_table_size >= eh_table_max_size)
1845 {
1846 if (eh_table)
1847 {
1848 eh_table_max_size += eh_table_max_size>>1;
1849
1850 if (eh_table_max_size < 0)
1851 abort ();
1852
1853 eh_table = (int *) xrealloc (eh_table,
1854 eh_table_max_size * sizeof (int));
1855 }
1856 else
1857 {
1858 eh_table_max_size = 252;
1859 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1860 }
1861 }
1862 eh_table[eh_table_size++] = n;
1863 #endif
1864 }
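/* The table grows by half of its current size each time it fills:
   252, 378, 567, 850, ... entries.  The overflow check above catches
   the (purely theoretical) point where that arithmetic wraps to a
   negative size.  */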
1865
1866 /* Return a non-zero value if we need to output an exception table.
1867
1868 On some platforms, we don't have to output a table explicitly.
1869 A zero return from this routine doesn't mean no table exists. */
1870
1871 int
1872 exception_table_p ()
1873 {
1874 if (eh_table)
1875 return 1;
1876
1877 return 0;
1878 }
1879
1880 /* Output the entry of the exception table corresponding to the
1881 exception region numbered N to file FILE.
1882
1883 N is the code label number corresponding to the handler of the
1884 region. */
1885
1886 static void
1887 output_exception_table_entry (file, n)
1888 FILE *file;
1889 int n;
1890 {
1891 char buf[256];
1892 rtx sym;
1893 struct handler_info *handler;
1894
1895 handler = get_first_handler (n);
1896
1897 for ( ; handler != NULL; handler = handler->next)
1898 {
1899 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
1900 sym = gen_rtx_SYMBOL_REF (Pmode, buf);
1901 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1902
1903 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
1904 sym = gen_rtx_SYMBOL_REF (Pmode, buf);
1905 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1906
1907 assemble_integer (handler->handler_label,
1908 POINTER_SIZE / BITS_PER_UNIT, 1);
1909
1910 if (flag_new_exceptions)
1911 {
1912 if (handler->type_info == NULL)
1913 assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1914 else
1915 if (handler->type_info == CATCH_ALL_TYPE)
1916 assemble_integer (GEN_INT (CATCH_ALL_TYPE),
1917 POINTER_SIZE / BITS_PER_UNIT, 1);
1918 else
1919 output_constant ((tree)(handler->type_info),
1920 POINTER_SIZE / BITS_PER_UNIT);
1921 }
1922 putc ('\n', file); /* blank line */
1923 }
1924 }
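/* Each handler record emitted above therefore occupies, in order, the
   fields below; the struct is only a descriptive sketch (the runtime
   reads raw pointer-sized words), and its names are invented for
   illustration.  */
#if 0
struct eh_table_record
{
  void *region_start;    /* address of the LEHBn label                  */
  void *region_end;      /* address of the LEHEn label                  */
  void *handler;         /* address of the handler's code label         */
  void *type_info;       /* present only with flag_new_exceptions:
                            0 for a cleanup, CATCH_ALL_TYPE for a
                            catch-all, otherwise the region's type
                            matching object                             */
};
#endif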
1925
1926 /* Output the exception table if we have and need one. */
1927
1928 static short language_code = 0;
1929 static short version_code = 0;
1930
1931 /* This routine will set the language code for exceptions. */
1932 void set_exception_lang_code (code)
1933 short code;
1934 {
1935 language_code = code;
1936 }
1937
1938 /* This routine will set the language version code for exceptions. */
1939 void set_exception_version_code (code)
1940 short code;
1941 {
1942 version_code = code;
1943 }
1944
1945
1946 void
1947 output_exception_table ()
1948 {
1949 int i;
1950 extern FILE *asm_out_file;
1951
1952 if (! doing_eh (0) || ! eh_table)
1953 return;
1954
1955 exception_section ();
1956
1957 /* Beginning marker for table. */
1958 assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
1959 assemble_label ("__EXCEPTION_TABLE__");
1960
1961 if (flag_new_exceptions)
1962 {
1963 assemble_integer (GEN_INT (NEW_EH_RUNTIME),
1964 POINTER_SIZE / BITS_PER_UNIT, 1);
1965 assemble_integer (GEN_INT (language_code), 2 , 1);
1966 assemble_integer (GEN_INT (version_code), 2 , 1);
1967
1968 /* Add enough padding to make sure the table aligns on a pointer boundary. */
1969 i = GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT - 4;
1970 for ( ; i < 0; i = i + GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT)
1971 ;
1972 if (i != 0)
1973 assemble_integer (const0_rtx, i , 1);
1974 }
1975
1976 for (i = 0; i < eh_table_size; ++i)
1977 output_exception_table_entry (asm_out_file, eh_table[i]);
1978
1979 free (eh_table);
1980 clear_function_eh_region ();
1981
1982 /* Ending marker for table. */
1983 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1984
1985 /* For binary compatibility, the old __throw checked the second
1986 position for a -1, so we should output at least two -1's. */
1987 if (! flag_new_exceptions)
1988 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1989
1990 putc ('\n', asm_out_file); /* blank line */
1991 }
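/* The section emitted above is laid out as follows; the bracketed
   parts appear only under the conditions noted.

     __EXCEPTION_TABLE__:
       [flag_new_exceptions]  NEW_EH_RUNTIME marker   (pointer-sized)
                              language_code           (2 bytes)
                              version_code            (2 bytes)
                              zero padding up to a pointer boundary
       one record per region, as emitted by output_exception_table_entry
       -1                                             (pointer-sized end marker)
       [! flag_new_exceptions] a second -1, since the old __throw also
                               checked the second position.  */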
1992 \f
1993 /* Emit code to get EH context.
1994
1995 We have to scan through the code to find possible EH context registers.
1996 Inlined functions may use it too, and thus we'll have to be able
1997 to change them too.
1998
1999 This is done only if using exceptions_via_longjmp. */
2000
2001 void
2002 emit_eh_context ()
2003 {
2004 rtx insn;
2005 rtx ehc = 0;
2006
2007 if (! doing_eh (0))
2008 return;
2009
2010 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2011 if (GET_CODE (insn) == INSN
2012 && GET_CODE (PATTERN (insn)) == USE)
2013 {
2014 rtx reg = find_reg_note (insn, REG_EH_CONTEXT, 0);
2015 if (reg)
2016 {
2017 rtx insns;
2018
2019 start_sequence ();
2020
2021 /* If this is the first use insn, emit the call here. This
2022 will always be at the top of our function, because if
2023 expand_inline_function notices a REG_EH_CONTEXT note, it
2024 adds a use insn to this function as well. */
2025 if (ehc == 0)
2026 ehc = call_get_eh_context ();
2027
2028 emit_move_insn (XEXP (reg, 0), ehc);
2029 insns = get_insns ();
2030 end_sequence ();
2031
2032 emit_insns_before (insns, insn);
2033 }
2034 }
2035 }
2036
2037 /* Scan the current insns and build a list of handler labels. The
2038 resulting list is placed in the global variable exception_handler_labels.
2039
2040 It is called after the last exception handling region is added to
2041 the current function (when the rtl is almost all built for the
2042 current function) and before the jump optimization pass. */
2043
2044 void
2045 find_exception_handler_labels ()
2046 {
2047 rtx insn;
2048
2049 exception_handler_labels = NULL_RTX;
2050
2051 /* If we aren't doing exception handling, there isn't much to check. */
2052 if (! doing_eh (0))
2053 return;
2054
2055 /* For each start of a region, add its label to the list. */
2056
2057 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2058 {
2059 struct handler_info* ptr;
2060 if (GET_CODE (insn) == NOTE
2061 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2062 {
2063 ptr = get_first_handler (NOTE_BLOCK_NUMBER (insn));
2064 for ( ; ptr; ptr = ptr->next)
2065 {
2066 /* make sure label isn't in the list already */
2067 rtx x;
2068 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2069 if (XEXP (x, 0) == ptr->handler_label)
2070 break;
2071 if (! x)
2072 exception_handler_labels = gen_rtx_EXPR_LIST (VOIDmode,
2073 ptr->handler_label, exception_handler_labels);
2074 }
2075 }
2076 }
2077 }
2078
2079 /* Return a value of 1 if the parameter label number is an exception handler
2080 label. Return 0 otherwise. */
2081
2082 int
2083 is_exception_handler_label (lab)
2084 int lab;
2085 {
2086 rtx x;
2087 for (x = exception_handler_labels ; x ; x = XEXP (x, 1))
2088 if (lab == CODE_LABEL_NUMBER (XEXP (x, 0)))
2089 return 1;
2090 return 0;
2091 }
2092
2093 /* Perform sanity checking on the exception_handler_labels list.
2094
2095 Can be called after find_exception_handler_labels is called to
2096 build the list of exception handlers for the current function and
2097 before we finish processing the current function. */
2098
2099 void
2100 check_exception_handler_labels ()
2101 {
2102 rtx insn, insn2;
2103
2104 /* If we aren't doing exception handling, there isn't much to check. */
2105 if (! doing_eh (0))
2106 return;
2107
2108 /* Make sure there is no more than 1 copy of a label */
2109 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
2110 {
2111 int count = 0;
2112 for (insn2 = exception_handler_labels; insn2; insn2 = XEXP (insn2, 1))
2113 if (XEXP (insn, 0) == XEXP (insn2, 0))
2114 count++;
2115 if (count != 1)
2116 warning ("Counted %d copies of EH region %d in list.\n", count,
2117 CODE_LABEL_NUMBER (XEXP (insn, 0)));
2118 }
2119
2120 }
2121 \f
2122 /* This group of functions initializes the exception handling data
2123 structures at the start of the compilation, initializes the data
2124 structures at the start of a function, and saves and restores the
2125 exception handling data structures for the start/end of a nested
2126 function. */
2127
2128 /* Toplevel initialization for EH things. */
2129
2130 void
2131 init_eh ()
2132 {
2133 }
2134
2135 /* Initialize the per-function EH information. */
2136
2137 void
2138 init_eh_for_function ()
2139 {
2140 ehstack.top = 0;
2141 catchstack.top = 0;
2142 ehqueue.head = ehqueue.tail = 0;
2143 catch_clauses = NULL_RTX;
2144 false_label_stack = 0;
2145 caught_return_label_stack = 0;
2146 protect_list = NULL_TREE;
2147 current_function_ehc = NULL_RTX;
2148 }
2149
2150 /* Save some of the per-function EH info into the save area denoted by
2151 P.
2152
2153 This is currently called from save_stmt_status. */
2154
2155 void
2156 save_eh_status (p)
2157 struct function *p;
2158 {
2159 if (p == NULL)
2160 abort ();
2161
2162 p->ehstack = ehstack;
2163 p->catchstack = catchstack;
2164 p->ehqueue = ehqueue;
2165 p->catch_clauses = catch_clauses;
2166 p->false_label_stack = false_label_stack;
2167 p->caught_return_label_stack = caught_return_label_stack;
2168 p->protect_list = protect_list;
2169 p->ehc = current_function_ehc;
2170
2171 init_eh_for_function ();
2172 }
2173
2174 /* Restore the per-function EH info saved into the area denoted by P.
2175
2176 This is currently called from restore_stmt_status. */
2177
2178 void
2179 restore_eh_status (p)
2180 struct function *p;
2181 {
2182 if (p == NULL)
2183 abort ();
2184
2185 protect_list = p->protect_list;
2186 caught_return_label_stack = p->caught_return_label_stack;
2187 false_label_stack = p->false_label_stack;
2188 catch_clauses = p->catch_clauses;
2189 ehqueue = p->ehqueue;
2190 ehstack = p->ehstack;
2191 catchstack = p->catchstack;
2192 current_function_ehc = p->ehc;
2193 }
2194 \f
2195 /* This section is for the exception handling specific optimization
2196 pass. First are the internal routines, and then the main
2197 optimization pass. */
2198
2199 /* Determine if the given INSN can throw an exception. */
2200
2201 static int
2202 can_throw (insn)
2203 rtx insn;
2204 {
2205 /* Calls can always potentially throw exceptions. */
2206 if (GET_CODE (insn) == CALL_INSN)
2207 return 1;
2208
2209 if (asynchronous_exceptions)
2210 {
2211 /* If we wanted asynchronous exceptions, then everything but NOTEs
2212 and CODE_LABELs could throw. */
2213 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2214 return 1;
2215 }
2216
2217 return 0;
2218 }
2219
2220 /* Scan an exception region looking for the matching end and then
2221 remove it if possible. INSN is the start of the region, N is the
2222 region number, and DELETE_OUTER is to note if anything in this
2223 region can throw.
2224
2225 Regions are removed if they cannot possibly catch an exception.
2226 This is determined by invoking can_throw on each insn within the
2227 region; if can_throw returns true for any of the instructions, the
2228 region can catch an exception, since there is an insn within the
2229 region that is capable of throwing an exception.
2230
2231 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2232 calls abort if it can't find one.
2233
2234 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEG, or if N doesn't
2235 correspond to the region number, or if DELETE_OUTER is NULL. */
2236
2237 static rtx
2238 scan_region (insn, n, delete_outer)
2239 rtx insn;
2240 int n;
2241 int *delete_outer;
2242 {
2243 rtx start = insn;
2244
2245 /* Assume we can delete the region. */
2246 int delete = 1;
2247
2248 if (insn == NULL_RTX
2249 || GET_CODE (insn) != NOTE
2250 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
2251 || NOTE_BLOCK_NUMBER (insn) != n
2252 || delete_outer == NULL)
2253 abort ();
2254
2255 insn = NEXT_INSN (insn);
2256
2257 /* Look for the matching end. */
2258 while (! (GET_CODE (insn) == NOTE
2259 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2260 {
2261 /* If anything can throw, we can't remove the region. */
2262 if (delete && can_throw (insn))
2263 {
2264 delete = 0;
2265 }
2266
2267 /* Watch out for and handle nested regions. */
2268 if (GET_CODE (insn) == NOTE
2269 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2270 {
2271 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
2272 }
2273
2274 insn = NEXT_INSN (insn);
2275 }
2276
2277 /* The _BEG/_END NOTEs must match and nest. */
2278 if (NOTE_BLOCK_NUMBER (insn) != n)
2279 abort ();
2280
2281 /* If anything in this exception region can throw, we can throw. */
2282 if (! delete)
2283 *delete_outer = 0;
2284 else
2285 {
2286 /* Delete the start and end of the region. */
2287 delete_insn (start);
2288 delete_insn (insn);
2289
2290 /* We no longer remove labels here, since flow will now remove any
2291 handler which cannot be called any more. */
2292
2293 #if 0
2294 /* Only do this part if we have built the exception handler
2295 labels. */
2296 if (exception_handler_labels)
2297 {
2298 rtx x, *prev = &exception_handler_labels;
2299
2300 /* Find it in the list of handlers. */
2301 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2302 {
2303 rtx label = XEXP (x, 0);
2304 if (CODE_LABEL_NUMBER (label) == n)
2305 {
2306 /* If we are the last reference to the handler,
2307 delete it. */
2308 if (--LABEL_NUSES (label) == 0)
2309 delete_insn (label);
2310
2311 if (optimize)
2312 {
2313 /* Remove it from the list of exception handler
2314 labels, if we are optimizing. If we are not, then
2315 leave it in the list, as we are not really going to
2316 remove the region. */
2317 *prev = XEXP (x, 1);
2318 XEXP (x, 1) = 0;
2319 XEXP (x, 0) = 0;
2320 }
2321
2322 break;
2323 }
2324 prev = &XEXP (x, 1);
2325 }
2326 }
2327 #endif
2328 }
2329 return insn;
2330 }
2331
2332 /* Perform various interesting optimizations for exception handling
2333 code.
2334
2335 We look for empty exception regions and remove them. The
2336 jump optimization code will remove the handler if nothing else uses
2337 it. */
2338
2339 void
2340 exception_optimize ()
2341 {
2342 rtx insn;
2343 int n;
2344
2345 /* Remove empty regions. */
2346 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2347 {
2348 if (GET_CODE (insn) == NOTE
2349 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2350 {
2351 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2352 insn, we will indirectly skip through all the insns
2353 in between. We are also guaranteed that the value of insn
2354 returned will be valid, as otherwise scan_region won't
2355 return. */
2356 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2357 }
2358 }
2359 }
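/* A small worked example of what this pass removes, assuming
   asynchronous exceptions are not enabled.  Region 2 below contains no
   insn that can throw, so scan_region deletes its two notes; region 1
   contains a CALL_INSN and is left alone.

     NOTE_INSN_EH_REGION_BEG 1
       (call_insn ...)              can_throw => region 1 must stay
       NOTE_INSN_EH_REGION_BEG 2
         (insn (set (reg) ...))     nothing in region 2 can throw
       NOTE_INSN_EH_REGION_END 2    both notes for region 2 are deleted
     NOTE_INSN_EH_REGION_END 1  */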
2360 \f
2361 /* Various hooks for the DWARF 2 __throw routine. */
2362
2363 /* Do any necessary initialization to access arbitrary stack frames.
2364 On the SPARC, this means flushing the register windows. */
2365
2366 void
2367 expand_builtin_unwind_init ()
2368 {
2369 /* Set this so all the registers get saved in our frame; we need to be
2370 able to copy the saved values for any registers from frames we unwind. */
2371 current_function_has_nonlocal_label = 1;
2372
2373 #ifdef SETUP_FRAME_ADDRESSES
2374 SETUP_FRAME_ADDRESSES ();
2375 #endif
2376 }
2377
2378 /* Given a value extracted from the return address register or stack slot,
2379 return the actual address encoded in that value. */
2380
2381 rtx
2382 expand_builtin_extract_return_addr (addr_tree)
2383 tree addr_tree;
2384 {
2385 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2386 return eh_outer_context (addr);
2387 }
2388
2389 /* Given an actual address in addr_tree, do any necessary encoding
2390 and return the value to be stored in the return address register or
2391 stack slot so the epilogue will return to that address. */
2392
2393 rtx
2394 expand_builtin_frob_return_addr (addr_tree)
2395 tree addr_tree;
2396 {
2397 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2398 #ifdef RETURN_ADDR_OFFSET
2399 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2400 #endif
2401 return addr;
2402 }
2403
2404 /* Given an actual address in addr_tree, set the return address register up
2405 so the epilogue will return to that address. If the return address is
2406 not in a register, do nothing. */
2407
2408 void
2409 expand_builtin_set_return_addr_reg (addr_tree)
2410 tree addr_tree;
2411 {
2412 rtx tmp;
2413 rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2414 0, hard_frame_pointer_rtx);
2415
2416 if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2417 return;
2418
2419 tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2420 if (tmp != ra)
2421 emit_move_insn (ra, tmp);
2422 }
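/* These hooks presumably sit behind builtins along the lines of
   __builtin_extract_return_addr and __builtin_frob_return_addr, so a
   runtime unwinder would round-trip a saved return address roughly as
   below; the builtin names are an assumption here, and any
   RETURN_ADDR_OFFSET or target encoding is handled inside the hooks.  */
#if 0
static void *
normalize_and_reencode (void *raw_return_addr)
{
  /* Decode the value found in the return-address slot ...  */
  void *pc = __builtin_extract_return_addr (raw_return_addr);

  /* ... and re-encode it before writing it back into such a slot.  */
  return __builtin_frob_return_addr (pc);
}
#endif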
2423
2424 /* Choose two registers for communication between the main body of
2425 __throw and the stub for adjusting the stack pointer. The first register
2426 is used to pass the address of the exception handler; the second register
2427 is used to pass the stack pointer offset.
2428
2429 For register 1 we use the return value register for a void *.
2430 For register 2 we use the static chain register if it exists and is
2431 different from register 1, otherwise some arbitrary call-clobbered
2432 register. */
2433
2434 static void
2435 eh_regs (r1, r2, outgoing)
2436 rtx *r1, *r2;
2437 int outgoing;
2438 {
2439 rtx reg1, reg2;
2440
2441 #ifdef FUNCTION_OUTGOING_VALUE
2442 if (outgoing)
2443 reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
2444 current_function_decl);
2445 else
2446 #endif
2447 reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2448 current_function_decl);
2449
2450 #ifdef STATIC_CHAIN_REGNUM
2451 if (outgoing)
2452 reg2 = static_chain_incoming_rtx;
2453 else
2454 reg2 = static_chain_rtx;
2455 if (REGNO (reg2) == REGNO (reg1))
2456 #endif /* STATIC_CHAIN_REGNUM */
2457 reg2 = NULL_RTX;
2458
2459 if (reg2 == NULL_RTX)
2460 {
2461 int i;
2462 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2463 if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
2464 {
2465 reg2 = gen_rtx_REG (Pmode, i);
2466 break;
2467 }
2468
2469 if (reg2 == NULL_RTX)
2470 abort ();
2471 }
2472
2473 *r1 = reg1;
2474 *r2 = reg2;
2475 }
2476
2477
2478 /* Retrieve the register which contains the pointer to the eh_context
2479 structure set up by __throw. */
2480
2481 rtx
2482 get_reg_for_handler ()
2483 {
2484 rtx reg1;
2485 reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2486 current_function_decl);
2487 return reg1;
2488 }
2489
2490
2491 /* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2492 to the exception handler. __throw will set up the necessary values
2493 and then return to the stub. */
2494
2495 rtx
2496 expand_builtin_eh_stub_old ()
2497 {
2498 rtx stub_start = gen_label_rtx ();
2499 rtx after_stub = gen_label_rtx ();
2500 rtx handler, offset;
2501
2502 emit_jump (after_stub);
2503 emit_label (stub_start);
2504
2505 eh_regs (&handler, &offset, 0);
2506
2507 adjust_stack (offset);
2508 emit_indirect_jump (handler);
2509 emit_label (after_stub);
2510 return gen_rtx_LABEL_REF (Pmode, stub_start);
2511 }
2512
2513 rtx
2514 expand_builtin_eh_stub ()
2515 {
2516 rtx stub_start = gen_label_rtx ();
2517 rtx after_stub = gen_label_rtx ();
2518 rtx handler, offset;
2519 rtx temp;
2520
2521 emit_jump (after_stub);
2522 emit_label (stub_start);
2523
2524 eh_regs (&handler, &offset, 0);
2525
2526 adjust_stack (offset);
2527
2528 /* Handler is in fact a pointer to the _eh_context structure; we need
2529 to pick out the handler field (the first element) and jump to it,
2530 leaving the pointer to _eh_context in the same hardware register. */
2531
2532 temp = gen_rtx_MEM (Pmode, handler);
2533 MEM_IN_STRUCT_P (temp) = 1;
2534 RTX_UNCHANGING_P (temp) = 1;
2535 emit_move_insn (offset, temp);
2536 emit_insn (gen_rtx_USE (Pmode, handler));
2537
2538 emit_indirect_jump (offset);
2539
2540 emit_label (after_stub);
2541 return gen_rtx_LABEL_REF (Pmode, stub_start);
2542 }
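/* Conceptually the stub emitted above does no more than the fragment
   below.  The struct and helper names are invented for illustration;
   only the first field of the eh_context structure (the handler
   address) is read, and the eh_context pointer itself stays in the
   first communication register.  */
#if 0
struct eh_context_sketch { void *handler; /* first field only */ };

extern void hypothetical_adjust_stack (long);

static void
illustrate_stub (struct eh_context_sketch *ehc, long stack_offset)
{
  hypothetical_adjust_stack (stack_offset);  /* adjust_stack (offset)  */
  goto *ehc->handler;                        /* indirect jump          */
}
#endif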
2543
2544 /* Set up the registers for passing the handler address and stack offset
2545 to the stub above. */
2546
2547 void
2548 expand_builtin_set_eh_regs (handler, offset)
2549 tree handler, offset;
2550 {
2551 rtx reg1, reg2;
2552
2553 eh_regs (&reg1, &reg2, 1);
2554
2555 store_expr (offset, reg2, 0);
2556 store_expr (handler, reg1, 0);
2557
2558 /* These will be used by the stub. */
2559 emit_insn (gen_rtx_USE (VOIDmode, reg1));
2560 emit_insn (gen_rtx_USE (VOIDmode, reg2));
2561 }
2562
2563 \f
2564
2565 /* This contains the code required to verify whether arbitrary instructions
2566 are in the same exception region. */
2567
2568 static int *insn_eh_region = (int *)0;
2569 static int maximum_uid;
2570
2571 static void
2572 set_insn_eh_region (first, region_num)
2573 rtx *first;
2574 int region_num;
2575 {
2576 rtx insn;
2577 int rnum;
2578
2579 for (insn = *first; insn; insn = NEXT_INSN (insn))
2580 {
2581 if ((GET_CODE (insn) == NOTE) &&
2582 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG))
2583 {
2584 rnum = NOTE_BLOCK_NUMBER (insn);
2585 insn_eh_region[INSN_UID (insn)] = rnum;
2586 insn = NEXT_INSN (insn);
2587 set_insn_eh_region (&insn, rnum);
2588 /* Upon return, insn points to the NOTE_INSN_EH_REGION_END of the nested region. */
2589 continue;
2590 }
2591 insn_eh_region[INSN_UID (insn)] = region_num;
2592 if ((GET_CODE (insn) == NOTE) &&
2593 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2594 break;
2595 }
2596 *first = insn;
2597 }
2598
2599 /* Free the insn table, and make sure it cannot be used again. */
2600
2601 void
2602 free_insn_eh_region ()
2603 {
2604 if (!doing_eh (0))
2605 return;
2606
2607 if (insn_eh_region)
2608 {
2609 free (insn_eh_region);
2610 insn_eh_region = (int *)0;
2611 }
2612 }
2613
2614 /* Initialize the table. max_uid must be calculated and handed into
2615 this routine. If it is unavailable, passing a value of 0 will
2616 cause this routine to calculate it as well. */
2617
2618 void
2619 init_insn_eh_region (first, max_uid)
2620 rtx first;
2621 int max_uid;
2622 {
2623 rtx insn;
2624
2625 if (!doing_eh (0))
2626 return;
2627
2628 if (insn_eh_region)
2629 free_insn_eh_region();
2630
2631 if (max_uid == 0)
2632 for (insn = first; insn; insn = NEXT_INSN (insn))
2633 if (INSN_UID (insn) > max_uid) /* find largest UID */
2634 max_uid = INSN_UID (insn);
2635
2636 maximum_uid = max_uid;
2637 insn_eh_region = (int *) malloc ((max_uid + 1) * sizeof (int));
2638 insn = first;
2639 set_insn_eh_region (&insn, 0);
2640 }
2641
2642
2643 /* Check whether 2 instructions are within the same region. */
2644
2645 int
2646 in_same_eh_region (insn1, insn2)
2647 rtx insn1, insn2;
2648 {
2649 int ret, uid1, uid2;
2650
2651 /* If no exceptions, instructions are always in same region. */
2652 if (!doing_eh (0))
2653 return 1;
2654
2655 /* If the table isn't allocated, assume the worst. */
2656 if (!insn_eh_region)
2657 return 0;
2658
2659 uid1 = INSN_UID (insn1);
2660 uid2 = INSN_UID (insn2);
2661
2662 /* If instructions have been allocated beyond the end of the table,
2663 either the table is out of date or these insns were added later;
2664 assume the worst. */
2665 if (uid1 > maximum_uid || uid2 > maximum_uid)
2666 return 0;
2667
2668 ret = (insn_eh_region[uid1] == insn_eh_region[uid2]);
2669 return ret;
2670 }
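/* Typical use of this little map by a pass that needs the region test;
   passing 0 for max_uid lets init_insn_eh_region compute it itself.  */
#if 0
static void
illustrate_region_queries (rtx insn1, rtx insn2)
{
  init_insn_eh_region (get_insns (), 0);

  if (in_same_eh_region (insn1, insn2))
    {
      /* insn1 and insn2 can be reordered without crossing an EH
         region boundary.  */
    }

  free_insn_eh_region ();
}
#endif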
2671