]> gcc.gnu.org Git - gcc.git/blob - gcc/except.c
except.c (find_exception_handler_labels): Use xmalloc instead of alloca...
[gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 92-95, 1996 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
26      be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurs without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46 There are two major codegen options for exception handling. The
47 flag -fsjlj-exceptions can be used to select the setjmp/longjmp
48 approach, which is the default. -fnosjlj-exceptions can be used to
49 get the PC range table approach. While this is a compile time
50 flag, an entire application must be compiled with the same codegen
51 option. The first is a PC range table approach, the second is a
52 setjmp/longjmp based scheme. We will first discuss the PC range
53 table approach, after that, we will discuss the setjmp/longjmp
54 based approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61 Regions of code within a function can be marked such that if it
62 contains the context of a throw, control will be passed to a
63 designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
74      In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
89
90 At program startup each object file invokes a function named
91 __register_exceptions with the address of its local
92 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c,
93 and is responsible for recording all of the exception regions into
94 one list (which is kept in a static variable named exception_table_list).
95
96 The function __throw is actually responsible for doing the
97 throw. In the C++ frontend, __throw is generated on a
98 per-object-file basis for each source file compiled with
99 -fexceptions. Before __throw is invoked, the current context
100 of the throw needs to be placed in the global variable __eh_pc.
101
102 __throw attempts to find the appropriate exception handler for the
103 PC value stored in __eh_pc by calling __find_first_exception_table_match
104 (which is defined in libgcc2.c). If __find_first_exception_table_match
105 finds a relevant handler, __throw jumps directly to it.
106
107 If a handler for the context being thrown from can't be found,
108 __throw is responsible for unwinding the stack, determining the
109 address of the caller of the current function (which will be used
110 as the new context to throw from), and then restarting the process
111 of searching for a handler for the new context. __throw may also
112 call abort if it is unable to unwind the stack, and can also
113 call an external library function named __terminate if it reaches
114 the top of the stack without finding an appropriate handler. (By
115 default __terminate invokes abort, but this behavior can be
116 changed by the user to perform some sort of cleanup behavior before
117 exiting).
118
119 Internal implementation details:
120
121 To associate a user-defined handler with a block of statements, the
122 function expand_start_try_stmts is used to mark the start of the
123 block of statements with which the handler is to be associated
124 (which is known as a "try block"). All statements that appear
125 afterwards will be associated with the try block.
126
127 A call to expand_start_all_catch marks the end of the try block,
128 and also marks the start of the "catch block" (the user-defined
129 handler) associated with the try block.
130
131 This user-defined handler will be invoked for *every* exception
132 thrown with the context of the try block. It is up to the handler
133 to decide whether or not it wishes to handle any given exception,
134 as there is currently no mechanism in this implementation for doing
135 this. (There are plans for conditionally processing an exception
136 based on its "type", which will provide a language-independent
137 mechanism).
138
139 If the handler chooses not to process the exception (perhaps by
140 looking at an "exception type" or some other additional data
141 supplied with the exception), it can fall through to the end of the
142 handler. expand_end_all_catch and expand_leftover_cleanups
143 add additional code to the end of each handler to take care of
144 rethrowing to the outer exception handler.
145
146 The handler also has the option to continue with "normal flow of
147 code", or in other words to resume executing at the statement
148 immediately after the end of the exception region. The variable
149 caught_return_label_stack contains a stack of labels, and jumping
150 to the topmost entry's label via expand_goto will resume normal
151 flow to the statement immediately after the end of the exception
152 region. If the handler falls through to the end, the exception will
153 be rethrown to the outer exception region.
154
155 The instructions for the catch block are kept as a separate
156 sequence, and will be emitted at the end of the function along with
157 the handlers specified via expand_eh_region_end. The end of the
158 catch block is marked with expand_end_all_catch.
159
160 Any data associated with the exception must currently be handled by
161 some external mechanism maintained in the frontend. For example,
162 the C++ exception mechanism passes an arbitrary value along with
163 the exception, and this is handled in the C++ frontend by using a
164 global variable to hold the value. (This will be changing in the
165 future.)
166
167 The mechanism in C++ for handling data associated with the
168 exception is clearly not thread-safe. For a thread-based
169 environment, another mechanism must be used (possibly using a
170 per-thread allocation mechanism if the size of the area that needs
171 to be allocated isn't known at compile time.)
172
173 Internally-generated exception regions (cleanups) are marked by
174 calling expand_eh_region_start to mark the start of the region,
175 and expand_eh_region_end (handler) is used to both designate the
176 end of the region and to associate a specified handler/cleanup with
177 the region. The rtl code in HANDLER will be invoked whenever an
178 exception occurs in the region between the calls to
179 expand_eh_region_start and expand_eh_region_end. After HANDLER is
180 executed, additional code is emitted to handle rethrowing the
181 exception to the outer exception handler. The code for HANDLER will
182 be emitted at the end of the function.
183
184 TARGET_EXPRs can also be used to designate exception regions. A
185 TARGET_EXPR gives an unwind-protect style interface commonly used
186 in functional languages such as LISP. The associated expression is
187 evaluated, and whether or not it (or any of the functions that it
188 calls) throws an exception, the protect expression is always
189 invoked. This implementation takes care of the details of
190 associating an exception table entry with the expression and
191 generating the necessary code (it actually emits the protect
192 expression twice, once for normal flow and once for the exception
193 case). As for the other handlers, the code for the exception case
194 will be emitted at the end of the function.
195
196 Cleanups can also be specified by using add_partial_entry (handler)
197 and end_protect_partials. add_partial_entry creates the start of
198 a new exception region; HANDLER will be invoked if an exception is
199 thrown with the context of the region between the calls to
200 add_partial_entry and end_protect_partials. end_protect_partials is
201 used to mark the end of these regions. add_partial_entry can be
202 called as many times as needed before calling end_protect_partials.
203 However, end_protect_partials should only be invoked once for each
204 group of calls to add_partial_entry as the entries are queued
205 and all of the outstanding entries are processed simultaneously
206 when end_protect_partials is invoked. Similarly to the other
207 handlers, the code for HANDLER will be emitted at the end of the
208 function.
209
210 The generated RTL for an exception region includes
211 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
212 the start and end of the exception region. A unique label is also
213 generated at the start of the exception region, which is available
214 by looking at the ehstack variable. The topmost entry corresponds
215 to the current region.
216
217 In the current implementation, an exception can only be thrown from
218 a function call (since the mechanism used to actually throw an
219 exception involves calling __throw). If an exception region is
220 created but no function calls occur within that region, the region
221 can be safely optimized away (along with its exception handlers)
222 since no exceptions can ever be caught in that region. This
223 optimization is performed unless -fasynchronous-exceptions is
224 given. If the user wishes to throw from a signal handler, or other
225 asynchronous place, -fasynchronous-exceptions should be used when
226 compiling for maximally correct code, at the cost of additional
227 exception regions. Using -fasynchronous-exceptions only produces
228 code that is reasonably safe in such situations, but a correct
229 program cannot rely upon this working. It can be used in failsafe
230 code, where trying to continue on, and proceeding with potentially
231 incorrect results is better than halting the program.
232
233
234 Unwinding the stack:
235
236 The details of unwinding the stack to the next frame can be rather
237 complex. While in many cases a generic __unwind_function routine
238 can be used by the generated exception handling code to do this, it
239 is often necessary to generate inline code to do the unwinding.
240
241 Whether or not these inlined unwinders are necessary is
242 target-specific.
243
244 By default, if the target-specific backend doesn't supply a
245 definition for __unwind_function, inlined unwinders will be used
246 instead. The main tradeoff here is in text space utilization.
247 Obviously, if inline unwinders have to be generated repeatedly,
248 this uses much more space than if a single routine is used.
249
250 However, it is simply not possible on some platforms to write a
251 generalized routine for doing stack unwinding without having some
252 form of additional data associated with each function. The current
253 implementation encodes this data in the form of additional machine
254 instructions. This is clearly not desirable, as it is extremely
255 inefficient. The next implementation will provide a set of metadata
256 for each function that will provide the needed information.
257
258 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize
259 whether or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER
260 is defined and has a non-zero value, a per-function unwinder is
261 not emitted for the current function.
262
263 On some platforms it is possible that neither __unwind_function
264 nor inlined unwinders are available. For these platforms it is not
265 possible to throw through a function call, and abort will be
266 invoked instead of performing the throw.
267
268 Future directions:
269
270 Currently __throw makes no differentiation between cleanups and
271 user-defined exception regions. While this makes the implementation
272 simple, it also implies that it is impossible to determine if a
273 user-defined exception handler exists for a given exception without
274 completely unwinding the stack in the process. This is undesirable
275 from the standpoint of debugging, as ideally it would be possible
276 to trap unhandled exceptions in the debugger before the process of
277 unwinding has even started.
278
279 This problem can be solved by marking user-defined handlers in a
280 special way (probably by adding additional bits to exception_table_list).
281 A two-pass scheme could then be used by __throw to iterate
282 through the table. The first pass would search for a relevant
283 user-defined handler for the current context of the throw, and if
284 one is found, the second pass would then invoke all needed cleanups
285 before jumping to the user-defined handler.
286
287 Many languages (including C++ and Ada) make execution of a
288 user-defined handler conditional on the "type" of the exception
289 thrown. (The type of the exception is actually the type of the data
290 that is thrown with the exception.) It will thus be necessary for
291 __throw to be able to determine if a given user-defined
292 exception handler will actually be executed, given the type of
293 exception.
294
295 One scheme is to add additional information to exception_table_list
296 as to the types of exceptions accepted by each handler. __throw
297 can do the type comparisons and then determine if the handler is
298 actually going to be executed.
299
300 There is currently no significant level of debugging support
301 available, other than to place a breakpoint on __throw. While
302 this is sufficient in most cases, it would be helpful to be able to
303 know where a given exception was going to be thrown to before it is
304 actually thrown, and to be able to choose between stopping before
305 every exception region (including cleanups), or just user-defined
306 exception regions. This should be possible to do in the two-pass
307 scheme by adding additional labels to __throw for appropriate
308 breakpoints, and additional debugger commands could be added to
309 query various state variables to determine what actions are to be
310 performed next.
311
312 Another major problem that is being worked on is the issue with
313 stack unwinding on various platforms. Currently the only platform
314 that has support for __unwind_function is the Sparc; all other
315 ports require per-function unwinders, which causes large amounts of
316 code bloat.
317
318 Ideally it would be possible to store a small set of metadata with
319 each function that would then make it possible to write a
320 __unwind_function for every platform. This would eliminate the
321 need for per-function unwinders.
322
323 The main reason the data is needed is that on some platforms the
324 order and types of data stored on the stack can vary depending on
325 the type of function, its arguments and returned values, and the
326 compilation options used (optimization versus non-optimization,
327 -fomit-frame-pointer, processor variations, etc).
328
329 Unfortunately, this also means that throwing through functions that
330 aren't compiled with exception handling support will still not be
331 possible on some platforms. This problem is currently being
332 investigated, but no solutions have been found that do not imply
333 some unacceptable performance penalties.
334
335 For setjmp/longjmp based exception handling, some of the details
336 are as above, but there are some additional details. This section
337 discusses the details.
338
339 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
340 optimize EH regions yet. We don't have to worry about machine
341 specific issues with unwinding the stack, as we rely upon longjmp
342 for all the machine specific details. There is no variable context
343 of a throw, just the one implied by the dynamic handler stack
344 pointed to by the dynamic handler chain. There is no exception
345     table, and no calls to __register_exceptions. __sjthrow is used
346 instead of __throw, and it works by using the dynamic handler
347 chain, and longjmp. -fasynchronous-exceptions has no effect, as
348 the elimination of trivial exception regions is not yet performed.
349
350 A frontend can set protect_cleanup_actions_with_terminate when all
351 the cleanup actions should be protected with an EH region that
352     calls terminate when an unhandled exception is thrown. C++ does
353 this, Ada does not. */
354
355
#include "config.h"
#include <stdio.h>
#include <string.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "assert.h"
372
373 /* One to use setjmp/longjmp method of generating code for exception
374 handling. */
375
376 int exceptions_via_longjmp = 1;
377
378 /* One to enable asynchronous exception support. */
379
380 int asynchronous_exceptions = 0;
381
382 /* One to protect cleanup actions with a handler that calls
383 __terminate, zero otherwise. */
384
385 int protect_cleanup_actions_with_terminate = 0;
386
387 /* A list of labels used for exception handlers. Created by
388 find_exception_handler_labels for the optimization passes. */
389
390 rtx exception_handler_labels;
391
392 /* Nonzero means that __throw was invoked.
393
394 This is used by the C++ frontend to know if code needs to be emitted
395 for __throw or not. */
396
397 int throw_used;
398
399 /* The dynamic handler chain. Nonzero if the function has already
400 fetched a pointer to the dynamic handler chain for exception
401 handling. */
402
403 rtx current_function_dhc;
404
405 /* The dynamic cleanup chain. Nonzero if the function has already
406 fetched a pointer to the dynamic cleanup chain for exception
407 handling. */
408
409 rtx current_function_dcc;
410
411     /* A stack used for keeping track of the currently active exception
412 handling region. As each exception region is started, an entry
413 describing the region is pushed onto this stack. The current
414 region can be found by looking at the top of the stack, and as we
415 exit regions, the corresponding entries are popped.
416
417 Entries cannot overlap; they can be nested. So there is only one
418 entry at most that corresponds to the current instruction, and that
419 is the entry on the top of the stack. */
420
421 static struct eh_stack ehstack;
422
423 /* A queue used for tracking which exception regions have closed but
424 whose handlers have not yet been expanded. Regions are emitted in
425 groups in an attempt to improve paging performance.
426
427 As we exit a region, we enqueue a new entry. The entries are then
428 dequeued during expand_leftover_cleanups and expand_start_all_catch,
429
430 We should redo things so that we either take RTL for the handler,
431 or we expand the handler expressed as a tree immediately at region
432 end time. */
433
434 static struct eh_queue ehqueue;
435
436 /* Insns for all of the exception handlers for the current function.
437 They are currently emitted by the frontend code. */
438
439 rtx catch_clauses;
440
441 /* A TREE_CHAINed list of handlers for regions that are not yet
442 closed. The TREE_VALUE of each entry contains the handler for the
443 corresponding entry on the ehstack. */
444
445 static tree protect_list;
446
447 /* Stacks to keep track of various labels. */
448
449 /* Keeps track of the label to resume to should one want to resume
450 normal control flow out of a handler (instead of, say, returning to
451 the caller of the current function or exiting the program). Also
452 used as the context of a throw to rethrow an exception to the outer
453 exception region. */
454
455 struct label_node *caught_return_label_stack = NULL;
456
457 /* A random data area for the front end's own use. */
458
459 struct label_node *false_label_stack = NULL;
460
461 /* The rtx and the tree for the saved PC value. */
462
463 rtx eh_saved_pc_rtx;
464 tree eh_saved_pc;
465
466 rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
467 \f
468 /* Various support routines to manipulate the various data structures
469 used by the exception handling code. */
470
471 /* Push a label entry onto the given STACK. */
472
473 void
474 push_label_entry (stack, rlabel, tlabel)
475 struct label_node **stack;
476 rtx rlabel;
477 tree tlabel;
478 {
479 struct label_node *newnode
480 = (struct label_node *) xmalloc (sizeof (struct label_node));
481
482 if (rlabel)
483 newnode->u.rlabel = rlabel;
484 else
485 newnode->u.tlabel = tlabel;
486 newnode->chain = *stack;
487 *stack = newnode;
488 }
489
490 /* Pop a label entry from the given STACK. */
491
492 rtx
493 pop_label_entry (stack)
494 struct label_node **stack;
495 {
496 rtx label;
497 struct label_node *tempnode;
498
499 if (! *stack)
500 return NULL_RTX;
501
502 tempnode = *stack;
503 label = tempnode->u.rlabel;
504 *stack = (*stack)->chain;
505 free (tempnode);
506
507 return label;
508 }
509
510 /* Return the top element of the given STACK. */
511
512 tree
513 top_label_entry (stack)
514 struct label_node **stack;
515 {
516 if (! *stack)
517 return NULL_TREE;
518
519 return (*stack)->u.tlabel;
520 }
521
522 /* Make a copy of ENTRY using xmalloc to allocate the space. */
523
524 static struct eh_entry *
525 copy_eh_entry (entry)
526 struct eh_entry *entry;
527 {
528 struct eh_entry *newentry;
529
530 newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
531 bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
532
533 return newentry;
534 }
535
536 /* Push a new eh_node entry onto STACK. */
537
538 static void
539 push_eh_entry (stack)
540 struct eh_stack *stack;
541 {
542 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
543 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
544
545 entry->outer_context = gen_label_rtx ();
546 entry->exception_handler_label = gen_label_rtx ();
547 entry->finalization = NULL_TREE;
548
549 node->entry = entry;
550 node->chain = stack->top;
551 stack->top = node;
552 }
553
554 /* Pop an entry from the given STACK. */
555
556 static struct eh_entry *
557 pop_eh_entry (stack)
558 struct eh_stack *stack;
559 {
560 struct eh_node *tempnode;
561 struct eh_entry *tempentry;
562
563 tempnode = stack->top;
564 tempentry = tempnode->entry;
565 stack->top = stack->top->chain;
566 free (tempnode);
567
568 return tempentry;
569 }
570
571 /* Enqueue an ENTRY onto the given QUEUE. */
572
573 static void
574 enqueue_eh_entry (queue, entry)
575 struct eh_queue *queue;
576 struct eh_entry *entry;
577 {
578 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
579
580 node->entry = entry;
581 node->chain = NULL;
582
583 if (queue->head == NULL)
584 {
585 queue->head = node;
586 }
587 else
588 {
589 queue->tail->chain = node;
590 }
591 queue->tail = node;
592 }
593
594 /* Dequeue an entry from the given QUEUE. */
595
596 static struct eh_entry *
597 dequeue_eh_entry (queue)
598 struct eh_queue *queue;
599 {
600 struct eh_node *tempnode;
601 struct eh_entry *tempentry;
602
603 if (queue->head == NULL)
604 return NULL;
605
606 tempnode = queue->head;
607 queue->head = queue->head->chain;
608
609 tempentry = tempnode->entry;
610 free (tempnode);
611
612 return tempentry;
613 }
614 \f
615 /* Routine to see if exception exception handling is turned on.
616 DO_WARN is non-zero if we want to inform the user that exception
617 handling is turned off.
618
619 This is used to ensure that -fexceptions has been specified if the
620 compiler tries to use any exception-specific functions. */
621
622 int
623 doing_eh (do_warn)
624 int do_warn;
625 {
626 if (! flag_exceptions)
627 {
628 static int warned = 0;
629 if (! warned && do_warn)
630 {
631 error ("exception handling disabled, use -fexceptions to enable");
632 warned = 1;
633 }
634 return 0;
635 }
636 return 1;
637 }
638
639 /* Given a return address in ADDR, determine the address we should use
640 to find the corresponding EH region. */
641
642 rtx
643 eh_outer_context (addr)
644 rtx addr;
645 {
646 /* First mask out any unwanted bits. */
647 #ifdef MASK_RETURN_ADDR
648 expand_binop (Pmode, and_optab, addr, MASK_RETURN_ADDR, addr,
649 1, OPTAB_LIB_WIDEN);
650 #endif
651
652 /* Then subtract out enough to get into the appropriate region. If
653 this is defined, assume we don't need to subtract anything as it
654 is already within the correct region. */
655 #if ! defined (RETURN_ADDR_OFFSET)
656 addr = plus_constant (addr, -1);
657 #endif
658
659 return addr;
660 }
661
662 /* Start a new exception region for a region of code that has a
663 cleanup action and push the HANDLER for the region onto
664 protect_list. All of the regions created with add_partial_entry
665 will be ended when end_protect_partials is invoked. */
666
667 void
668 add_partial_entry (handler)
669 tree handler;
670 {
671 expand_eh_region_start ();
672
673 /* Make sure the entry is on the correct obstack. */
674 push_obstacks_nochange ();
675 resume_temporary_allocation ();
676
677 /* Because this is a cleanup action, we may have to protect the handler
678 with __terminate. */
679 handler = protect_with_terminate (handler);
680
681 protect_list = tree_cons (NULL_TREE, handler, protect_list);
682 pop_obstacks ();
683 }
684
685 /* Get a reference to the dynamic handler chain. It points to the
686 pointer to the next element in the dynamic handler chain. It ends
687 when there are no more elements in the dynamic handler chain, when
688 the value is &top_elt from libgcc2.c. Immediately after the
689 pointer, is an area suitable for setjmp/longjmp when
690 DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
691 __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
692 isn't defined.
693
694 This routine is here to facilitate the porting of this code to
695 systems with threads. One can either replace the routine we emit a
696 call for here in libgcc2.c, or one can modify this routine to work
697 with their thread system. */
698
699 rtx
700 get_dynamic_handler_chain ()
701 {
702 #if 0
703 /* Do this once we figure out how to get this to the front of the
704 function, and we really only want one per real function, not one
705 per inlined function. */
706 if (current_function_dhc == 0)
707 {
708 rtx dhc, insns;
709 start_sequence ();
710
711 dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
712 NULL_RTX, 1,
713 Pmode, 0);
714 current_function_dhc = copy_to_reg (dhc);
715 insns = get_insns ();
716 end_sequence ();
717 emit_insns_before (insns, get_first_nonparm_insn ());
718 }
719 #else
720 rtx dhc;
721 dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
722 NULL_RTX, 1,
723 Pmode, 0);
724 current_function_dhc = copy_to_reg (dhc);
725 #endif
726
727 /* We don't want a copy of the dhc, but rather, the single dhc. */
728 return gen_rtx (MEM, Pmode, current_function_dhc);
729 }
730
731 /* Get a reference to the dynamic cleanup chain. It points to the
732 pointer to the next element in the dynamic cleanup chain.
733 Immediately after the pointer, are two Pmode variables, one for a
734 pointer to a function that performs the cleanup action, and the
735 second, the argument to pass to that function. */
736
737 rtx
738 get_dynamic_cleanup_chain ()
739 {
740 rtx dhc, dcc;
741
742 dhc = get_dynamic_handler_chain ();
743 dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
744
745 current_function_dcc = copy_to_reg (dcc);
746
747 /* We don't want a copy of the dcc, but rather, the single dcc. */
748 return gen_rtx (MEM, Pmode, current_function_dcc);
749 }
750
751 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
752 LABEL is an rtx of code CODE_LABEL, in this function. */
753
754 void
755 jumpif_rtx (x, label)
756 rtx x;
757 rtx label;
758 {
759 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
760 }
761
762 /* Generate code to evaluate X and jump to LABEL if the value is zero.
763 LABEL is an rtx of code CODE_LABEL, in this function. */
764
765 void
766 jumpifnot_rtx (x, label)
767 rtx x;
768 rtx label;
769 {
770 jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
771 }
772
773 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
774 We just need to create an element for the cleanup list, and push it
775 into the chain.
776
777 A dynamic cleanup is a cleanup action implied by the presence of an
778 element on the EH runtime dynamic cleanup stack that is to be
779 performed when an exception is thrown. The cleanup action is
780 performed by __sjthrow when an exception is thrown. Only certain
781 actions can be optimized into dynamic cleanup actions. For the
782 restrictions on what actions can be performed using this routine,
783 see expand_eh_region_start_tree. */
784
785 static void
786 start_dynamic_cleanup (func, arg)
787 tree func;
788 tree arg;
789 {
790 rtx dhc, dcc;
791 rtx new_func, new_arg;
792 rtx x, buf;
793 int size;
794
795 /* We allocate enough room for a pointer to the function, and
796 one argument. */
797 size = 2;
798
799 /* XXX, FIXME: The stack space allocated this way is too long lived,
800 but there is no allocation routine that allocates at the level of
801 the last binding contour. */
802 buf = assign_stack_local (BLKmode,
803 GET_MODE_SIZE (Pmode)*(size+1),
804 0);
805
806 buf = change_address (buf, Pmode, NULL_RTX);
807
808 /* Store dcc into the first word of the newly allocated buffer. */
809
810 dcc = get_dynamic_cleanup_chain ();
811 emit_move_insn (buf, dcc);
812
813 /* Store func and arg into the cleanup list element. */
814
815 new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
816 GET_MODE_SIZE (Pmode)));
817 new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
818 GET_MODE_SIZE (Pmode)*2));
819 x = expand_expr (func, new_func, Pmode, 0);
820 if (x != new_func)
821 emit_move_insn (new_func, x);
822
823 x = expand_expr (arg, new_arg, Pmode, 0);
824 if (x != new_arg)
825 emit_move_insn (new_arg, x);
826
827 /* Update the cleanup chain. */
828
829 emit_move_insn (dcc, XEXP (buf, 0));
830 }
831
832 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
833 handler stack. This should only be used by expand_eh_region_start
834 or expand_eh_region_start_tree. */
835
static void
start_dynamic_handler ()
{
  rtx dhc, dcc;
  rtx x, arg, buf;
  int size;

#ifndef DONT_USE_BUILTIN_SETJMP
  /* The number of Pmode words for the setjmp buffer, when using the
     builtin setjmp/longjmp, see expand_builtin, case
     BUILT_IN_LONGJMP.  */
  size = 5;
#else
#ifdef JMP_BUF_SIZE
  size = JMP_BUF_SIZE;
#else
  /* Should be large enough for most systems, if it is not,
     JMP_BUF_SIZE should be defined with the proper value.  It will
     also tend to be larger than necessary for most systems, a more
     optimal port will define JMP_BUF_SIZE.  */
  size = FIRST_PSEUDO_REGISTER+2;
#endif
#endif
  /* The element layout is: [0] saved handler chain, [1] cleanup chain,
     [2..] the jmpbuf itself -- hence size+1 words in total.  */
  /* XXX, FIXME: The stack space allocated this way is too long lived,
     but there is no allocation routine that allocates at the level of
     the last binding contour.  */
  arg = assign_stack_local (BLKmode,
			    GET_MODE_SIZE (Pmode)*(size+1),
			    0);

  arg = change_address (arg, Pmode, NULL_RTX);

  /* Store dhc into the first word of the newly allocated buffer.  */

  dhc = get_dynamic_handler_chain ();
  dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
					    GET_MODE_SIZE (Pmode)));
  emit_move_insn (arg, dhc);

  /* Zero out the start of the cleanup chain.  */
  emit_move_insn (dcc, const0_rtx);

  /* The jmpbuf starts two words into the area allocated.  */
  buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);

  /* The setjmp returns twice: zero on the initial fall-through path,
     nonzero when __sjthrow longjmps back for a thrown exception.  */
#ifdef DONT_USE_BUILTIN_SETJMP
  x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
			       buf, Pmode);
#else
  x = expand_builtin_setjmp (buf, NULL_RTX);
#endif

  /* If we come back here for a catch, transfer control to the
     handler.  */

  jumpif_rtx (x, ehstack.top->entry->exception_handler_label);

  /* We are committed to this, so update the handler chain.  */

  emit_move_insn (dhc, XEXP (arg, 0));
}
897
898 /* Start an exception handling region for the given cleanup action.
899 All instructions emitted after this point are considered to be part
900 of the region until expand_eh_region_end is invoked. CLEANUP is
901 the cleanup action to perform. The return value is true if the
902 exception region was optimized away. If that case,
903 expand_eh_region_end does not need to be called for this cleanup,
904 nor should it be.
905
906 This routine notices one particular common case in C++ code
907 generation, and optimizes it so as to not need the exception
908 region. It works by creating a dynamic cleanup action, instead of
909 of a using an exception region. */
910
911 int
912 expand_eh_region_start_tree (decl, cleanup)
913 tree decl;
914 tree cleanup;
915 {
916 rtx note;
917
918 /* This is the old code. */
919 if (! doing_eh (0))
920 return 0;
921
922 /* The optimization only applies to actions protected with
923 terminate, and only applies if we are using the setjmp/longjmp
924 codegen method. */
925 if (exceptions_via_longjmp
926 && protect_cleanup_actions_with_terminate)
927 {
928 tree func, arg;
929 tree args;
930
931 /* Ignore any UNSAVE_EXPR. */
932 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
933 cleanup = TREE_OPERAND (cleanup, 0);
934
935 /* Further, it only applies if the action is a call, if there
936 are 2 arguments, and if the second argument is 2. */
937
938 if (TREE_CODE (cleanup) == CALL_EXPR
939 && (args = TREE_OPERAND (cleanup, 1))
940 && (func = TREE_OPERAND (cleanup, 0))
941 && (arg = TREE_VALUE (args))
942 && (args = TREE_CHAIN (args))
943
944 /* is the second argument 2? */
945 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
946 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
947 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
948
949 /* Make sure there are no other arguments. */
950 && TREE_CHAIN (args) == NULL_TREE)
951 {
952 /* Arrange for returns and gotos to pop the entry we make on the
953 dynamic cleanup stack. */
954 expand_dcc_cleanup (decl);
955 start_dynamic_cleanup (func, arg);
956 return 1;
957 }
958 }
959
960 expand_eh_region_start_for_decl (decl);
961
962 return 0;
963 }
964
965 /* Just like expand_eh_region_start, except if a cleanup action is
966 entered on the cleanup chain, the TREE_PURPOSE of the element put
967 on the chain is DECL. DECL should be the associated VAR_DECL, if
968 any, otherwise it should be NULL_TREE. */
969
void
expand_eh_region_start_for_decl (decl)
     tree decl;
{
  rtx note;			/* Only set/read on the non-longjmp path.  */

  /* Do nothing if exception handling is disabled.  */
  if (! doing_eh (0))
    return;

  if (exceptions_via_longjmp)
    {
      /* We need a new block to record the start and end of the
	 dynamic handler chain.  We could always do this, but we
	 really want to permit jumping into such a block, and we want
	 to avoid any errors or performance impact in the SJ EH code
	 for now.  */
      expand_start_bindings (0);

      /* But we don't need or want a new temporary level.  */
      pop_temp_slots ();

      /* Mark this block as created by expand_eh_region_start.  This
	 is so that we can pop the block with expand_end_bindings
	 automatically.  */
      mark_block_as_eh_region ();

      /* Arrange for returns and gotos to pop the entry we make on the
	 dynamic handler stack.  */
      expand_dhc_cleanup (decl);
    }

  /* On the table-driven path the region is delimited by a BEG note
     tagged with the handler's label number; on the SJ/LJ path we
     instead emit a runtime setjmp-based handler.  */
  if (exceptions_via_longjmp == 0)
    note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
  push_eh_entry (&ehstack);
  if (exceptions_via_longjmp == 0)
    NOTE_BLOCK_NUMBER (note)
      = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
  if (exceptions_via_longjmp)
    start_dynamic_handler ();
}
1011
1012 /* Start an exception handling region. All instructions emitted after
1013 this point are considered to be part of the region until
1014 expand_eh_region_end is invoked. */
1015
void
expand_eh_region_start ()
{
  /* Same as the _for_decl variant, with no associated VAR_DECL.  */
  expand_eh_region_start_for_decl (NULL_TREE);
}
1021
1022 /* End an exception handling region. The information about the region
1023 is found on the top of ehstack.
1024
1025 HANDLER is either the cleanup for the exception region, or if we're
1026 marking the end of a try block, HANDLER is integer_zero_node.
1027
1028 HANDLER will be transformed to rtl when expand_leftover_cleanups
1029 is invoked. */
1030
void
expand_eh_region_end (handler)
     tree handler;
{
  struct eh_entry *entry;

  /* Do nothing if exception handling is disabled.  */
  if (! doing_eh (0))
    return;

  /* Retrieve the entry pushed by the matching expand_eh_region_start.  */
  entry = pop_eh_entry (&ehstack);

  if (exceptions_via_longjmp == 0)
    {
      rtx label;
      /* Close the region with an END note carrying the handler's
	 label number, pairing it with the BEG note.  */
      rtx note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
      NOTE_BLOCK_NUMBER (note) = CODE_LABEL_NUMBER (entry->exception_handler_label);

      /* Skip over the rethrow landing pad on the normal path.  */
      label = gen_label_rtx ();
      emit_jump (label);

      /* Emit a label marking the end of this exception region that
	 is used for rethrowing into the outer context.  */
      emit_label (entry->outer_context);

      /* Put in something that takes up space, as otherwise the end
	 address for this EH region could have the exact same address as
	 its outer region.  This would cause us to miss the fact that
	 resuming exception handling with this PC value would be inside
	 the outer region.  */
      emit_insn (gen_nop ());
      emit_barrier ();
      emit_label (label);
    }

  /* Queue the handler; its RTL is generated later by
     expand_leftover_cleanups or expand_start_all_catch.  */
  entry->finalization = handler;

  enqueue_eh_entry (&ehqueue, entry);

  /* If we have already started ending the bindings, don't recurse.
     This only happens when exceptions_via_longjmp is true.  */
  if (is_eh_region ())
    {
      /* Because we don't need or want a new temporary level and
	 because we didn't create one in expand_eh_region_start,
	 create a fake one now to avoid removing one in
	 expand_end_bindings.  */
      push_temp_slots ();

      mark_block_as_not_eh_region ();

      /* Maybe do this to prevent jumping in and so on...  */
      expand_end_bindings (NULL_TREE, 0, 0);
    }
}
1085
1086 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1087 call to __sjthrow.
1088
1089 Otherwise, we emit a call to __throw and note that we threw
1090 something, so we know we need to generate the necessary code for
1091 __throw.
1092
1093 Before invoking throw, the __eh_pc variable must have been set up
1094 to contain the PC being thrown from. This address is used by
1095 __throw to determine which exception region (if any) is
1096 responsible for handling the exception. */
1097
void
emit_throw ()
{
  if (exceptions_via_longjmp)
    {
      /* SJ/LJ codegen: __sjthrow walks the dynamic handler chain.  */
      emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
    }
  else
    {
#ifdef JUMP_TO_THROW
      /* Target prefers a direct jump into __throw.  */
      emit_indirect_jump (throw_libfunc);
#else
      SYMBOL_REF_USED (throw_libfunc) = 1;
      emit_library_call (throw_libfunc, 0, VOIDmode, 0);
#endif
      /* Record that __throw's support code must be generated.  */
      throw_used = 1;
    }
  /* Control never returns here.  */
  emit_barrier ();
}
1117
1118 /* An internal throw with an indirect CONTEXT we want to throw from.
1119 CONTEXT evaluates to the context of the throw. */
1120
static void
expand_internal_throw_indirect (context)
     rtx context;
{
  /* __eh_pc must hold the throw context before __throw runs; see the
     comment above emit_throw.  */
  assemble_external (eh_saved_pc);
  emit_move_insn (eh_saved_pc_rtx, context);
  emit_throw ();
}
1129
1130 /* An internal throw with a direct CONTEXT we want to throw from.
1131 CONTEXT must be a label; its address will be used as the context of
1132 the throw. */
1133
1134 void
1135 expand_internal_throw (context)
1136 rtx context;
1137 {
1138 expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context));
1139 }
1140
1141 /* Called from expand_exception_blocks and expand_end_catch_block to
1142 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1143
void
expand_leftover_cleanups ()
{
  struct eh_entry *entry;

  /* Drain the queue of regions closed by expand_eh_region_end.  */
  while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
    {
      rtx prev;

      /* A leftover try block.  Shouldn't be one here.  */
      if (entry->finalization == integer_zero_node)
	abort ();

      /* Output the label for the start of the exception handler.  */
      emit_label (entry->exception_handler_label);

#ifdef HAVE_exception_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_exception_receiver)
	  emit_insn (gen_exception_receiver ());
#endif

#ifdef HAVE_nonlocal_goto_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
#endif

      /* And now generate the insns for the handler.  */
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* If the handler can fall off its end (no trailing barrier),
	 make it rethrow into the enclosing context.  */
      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	{
	  if (exceptions_via_longjmp)
	    emit_throw ();
	  else
	    {
	      /* The below can be optimized away, and we could just
		 fall into the next EH handler, if we are certain they
		 are nested.  */
	      /* Emit code to throw to the outer context if we fall off
		 the end of the handler.  */
	      expand_internal_throw (entry->outer_context);
	    }
	}

      /* The entry was heap-allocated by push_eh_entry's machinery.  */
      free (entry);
    }
}
1194
/* Called at the start of a block of try statements.  */

void
expand_start_try_stmts ()
{
  /* A try block is simply an exception region; open one unless
     exception handling is disabled.  */
  if (doing_eh (1))
    expand_eh_region_start ();
}
1204
1205 /* Generate RTL for the start of a group of catch clauses.
1206
1207 It is responsible for starting a new instruction sequence for the
1208 instructions in the catch block, and expanding the handlers for the
1209 internally-generated exception regions nested within the try block
1210 corresponding to this catch block. */
1211
void
expand_start_all_catch ()
{
  struct eh_entry *entry;
  tree label;

  if (! doing_eh (1))
    return;

  /* End the try block.  */
  expand_eh_region_end (integer_zero_node);

  emit_line_note (input_filename, lineno);
  label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

  /* The label for the exception handling block that we will save.
     This is Lresume in the documentation.  */
  expand_label (label);

  if (exceptions_via_longjmp == 0)
    {
      /* Put in something that takes up space, as otherwise the end
	 address for the EH region could have the exact same address as
	 the outer region, causing us to miss the fact that resuming
	 exception handling with this PC value would be inside the outer
	 region.  */
      emit_insn (gen_nop ());
    }

  /* Push the label that points to where normal flow is resumed onto
     the top of the label stack.  */
  push_label_entry (&caught_return_label_stack, NULL_RTX, label);

  /* Start a new sequence for all the catch blocks.  We will add this
     to the global sequence catch_clauses when we have completed all
     the handlers in this handler-seq.  */
  start_sequence ();

  while (1)
    {
      rtx prev;

      /* NOTE(review): assumes the matching try entry is reached
	 before the queue empties -- entry is used without a null
	 check; confirm against the enqueue discipline.  */
      entry = dequeue_eh_entry (&ehqueue);
      /* Emit the label for the exception handler for this region, and
	 expand the code for the handler.

	 Note that a catch region is handled as a side-effect here;
	 for a try block, entry->finalization will contain
	 integer_zero_node, so no code will be generated in the
	 expand_expr call below.  But, the label for the handler will
	 still be emitted, so any code emitted after this point will
	 end up being the handler.  */
      emit_label (entry->exception_handler_label);

#ifdef HAVE_exception_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_exception_receiver)
	  emit_insn (gen_exception_receiver ());
#endif

#ifdef HAVE_nonlocal_goto_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
#endif

      /* When we get down to the matching entry for this try block, stop.  */
      if (entry->finalization == integer_zero_node)
	{
	  /* Don't forget to free this entry.  */
	  free (entry);
	  break;
	}

      /* And now generate the insns for the handler.  */
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* If the handler can fall off its end, arrange a rethrow.  */
      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	{
	  if (exceptions_via_longjmp)
	    emit_throw ();
	  else
	    {
	      /* Code to throw out to outer context when we fall off end
		 of the handler.  We can't do this here for catch blocks,
		 so it's done in expand_end_all_catch instead.

		 The below can be optimized away (and we could just fall
		 into the next EH handler) if we are certain they are
		 nested.  */

	      expand_internal_throw (entry->outer_context);
	    }
	}
      free (entry);
    }
}
1310
1311 /* Finish up the catch block. At this point all the insns for the
1312 catch clauses have already been generated, so we only have to add
1313 them to the catch_clauses list. We also want to make sure that if
1314 we fall off the end of the catch clauses that we rethrow to the
1315 outer EH region. */
1316
void
expand_end_all_catch ()
{
  rtx new_catch_clause;

  if (! doing_eh (1))
    return;

  if (exceptions_via_longjmp)
    emit_throw ();
  else
    {
      /* Code to throw out to outer context, if we fall off end of catch
	 handlers.  This is rethrow (Lresume, same id, same obj) in the
	 documentation.  We use Lresume because we know that it will throw
	 to the correct context.

	 In other words, if the catch handler doesn't exit or return, we
	 do a "throw" (using the address of Lresume as the point being
	 thrown from) so that the outer EH region can then try to process
	 the exception.  */

      expand_internal_throw (DECL_RTL (top_label_entry (&caught_return_label_stack)));
    }

  /* Now we have the complete catch sequence.  */
  new_catch_clause = get_insns ();
  end_sequence ();

  /* This level of catch blocks is done, so set up the successful
     catch jump label for the next layer of catch blocks.  */
  pop_label_entry (&caught_return_label_stack);

  /* Add the new sequence of catches to the main one for this function.  */
  push_to_sequence (catch_clauses);
  emit_insns (new_catch_clause);
  catch_clauses = get_insns ();
  end_sequence ();

  /* Here we fall through into the continuation code.  */
}
1358
1359 /* End all the pending exception regions on protect_list. The handlers
1360 will be emitted when expand_leftover_cleanups is invoked. */
1361
void
end_protect_partials ()
{
  /* Close each pending region; the handler (the TREE_VALUE of each
     element) is queued and emitted later by expand_leftover_cleanups.  */
  while (protect_list)
    {
      expand_eh_region_end (TREE_VALUE (protect_list));
      protect_list = TREE_CHAIN (protect_list);
    }
}
1371
1372 /* Arrange for __terminate to be called if there is an unhandled throw
1373 from within E. */
1374
tree
protect_with_terminate (e)
     tree e;
{
  /* We only need to do this when using setjmp/longjmp EH and the
     language requires it, as otherwise we protect all of the handlers
     at once, if we need to.  */
  if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
    {
      tree handler, result;

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();

      /* Build an RTL_EXPR whose only effect is to call __terminate.  */
      handler = make_node (RTL_EXPR);
      TREE_TYPE (handler) = void_type_node;
      RTL_EXPR_RTL (handler) = const0_rtx;
      TREE_SIDE_EFFECTS (handler) = 1;
      start_sequence_for_rtl_expr (handler);

      /* __terminate does not return.  */
      emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
      emit_barrier ();

      RTL_EXPR_SEQUENCE (handler) = get_insns ();
      end_sequence ();

      /* Wrap E so the terminate handler runs on an unhandled throw,
	 preserving E's tree flags on the wrapper.  */
      result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
      TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
      TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
      TREE_READONLY (result) = TREE_READONLY (e);

      pop_obstacks ();

      e = result;
    }

  return e;
}
1414 \f
1415 /* The exception table that we build that is used for looking up and
1416 dispatching exceptions, the current number of entries, and its
1417 maximum size before we have to extend it.
1418
1419 The number in eh_table is the code label number of the exception
1420 handler for the region. This is added by add_eh_table_entry and
1421 used by output_exception_table_entry. */
1422
static int *eh_table;		/* Dynamic array of handler label numbers.  */
static int eh_table_size;	/* Entries currently in use.  */
static int eh_table_max_size;	/* Allocated capacity of eh_table.  */
1426
1427 /* Note the need for an exception table entry for region N. If we
1428 don't need to output an explicit exception table, avoid all of the
1429 extra work.
1430
1431 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1432 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1433 label number of the exception handler for the region. */
1434
void
add_eh_table_entry (n)
     int n;
{
#ifndef OMIT_EH_TABLE
  if (eh_table_size >= eh_table_max_size)
    {
      if (eh_table)
	{
	  /* Grow the table by 50%.  */
	  eh_table_max_size += eh_table_max_size>>1;

	  /* The capacity went negative: integer overflow.  */
	  if (eh_table_max_size < 0)
	    abort ();

	  /* Overwriting eh_table on failure is safe here because
	     fatal() does not return.  */
	  if ((eh_table = (int *) realloc (eh_table,
					   eh_table_max_size * sizeof (int)))
	      == 0)
	    fatal ("virtual memory exhausted");
	}
      else
	{
	  /* First entry: allocate the initial table.  */
	  eh_table_max_size = 252;
	  eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
	}
    }
  eh_table[eh_table_size++] = n;
#endif
}
1463
1464 /* Return a non-zero value if we need to output an exception table.
1465
1466 On some platforms, we don't have to output a table explicitly.
1467 This routine doesn't mean we don't have one. */
1468
1469 int
1470 exception_table_p ()
1471 {
1472 if (eh_table)
1473 return 1;
1474
1475 return 0;
1476 }
1477
1478 /* Output the entry of the exception table corresponding to to the
1479 exception region numbered N to file FILE.
1480
1481 N is the code label number corresponding to the handler of the
1482 region. */
1483
1484 static void
1485 output_exception_table_entry (file, n)
1486 FILE *file;
1487 int n;
1488 {
1489 char buf[256];
1490 rtx sym;
1491
1492 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
1493 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1494 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1495
1496 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
1497 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1498 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1499
1500 ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
1501 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1502 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1503
1504 putc ('\n', file); /* blank line */
1505 }
1506
1507 /* Output the exception table if we have and need one. */
1508
1509 void
1510 output_exception_table ()
1511 {
1512 int i;
1513 extern FILE *asm_out_file;
1514
1515 if (! doing_eh (0))
1516 return;
1517
1518 exception_section ();
1519
1520 /* Beginning marker for table. */
1521 assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
1522 assemble_label ("__EXCEPTION_TABLE__");
1523
1524 assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1525 assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1526 assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1527 putc ('\n', asm_out_file); /* blank line */
1528
1529 for (i = 0; i < eh_table_size; ++i)
1530 output_exception_table_entry (asm_out_file, eh_table[i]);
1531
1532 free (eh_table);
1533
1534 /* Ending marker for table. */
1535 assemble_label ("__EXCEPTION_END__");
1536 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1537 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1538 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1539 putc ('\n', asm_out_file); /* blank line */
1540 }
1541
1542 /* Generate code to initialize the exception table at program startup
1543 time. */
1544
1545 void
1546 register_exception_table ()
1547 {
1548 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1549 VOIDmode, 1,
1550 gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1551 Pmode);
1552 }
1553 \f
1554 /* Emit the RTL for the start of the per-function unwinder for the
1555 current function. See emit_unwinder for further information.
1556
1557 DOESNT_NEED_UNWINDER is a target-specific macro that determines if
1558 the current function actually needs a per-function unwinder or not.
1559 By default, all functions need one. */
1560
void
start_eh_unwinder ()
{
#ifdef DOESNT_NEED_UNWINDER
  /* The target has told us no per-function unwinder is required.  */
  if (DOESNT_NEED_UNWINDER)
    return;
#endif

  /* If we are using the setjmp/longjmp implementation, we don't need a
     per function unwinder.  */

  if (exceptions_via_longjmp)
    return;

  /* The unwinder itself is protected by an exception region so that
     a throw during unwinding is caught; closed in end_eh_unwinder.  */
  expand_eh_region_start ();
}
1577
1578 /* Emit insns for the end of the per-function unwinder for the
1579 current function. */
1580
1581 void
1582 end_eh_unwinder ()
1583 {
1584 tree expr;
1585 rtx return_val_rtx, ret_val, label, end, insns;
1586
1587 if (! doing_eh (0))
1588 return;
1589
1590 #ifdef DOESNT_NEED_UNWINDER
1591 if (DOESNT_NEED_UNWINDER)
1592 return;
1593 #endif
1594
1595 /* If we are using the setjmp/longjmp implementation, we don't need a
1596 per function unwinder. */
1597
1598 if (exceptions_via_longjmp)
1599 return;
1600
1601 assemble_external (eh_saved_pc);
1602
1603 expr = make_node (RTL_EXPR);
1604 TREE_TYPE (expr) = void_type_node;
1605 RTL_EXPR_RTL (expr) = const0_rtx;
1606 TREE_SIDE_EFFECTS (expr) = 1;
1607 start_sequence_for_rtl_expr (expr);
1608
1609 /* ret_val will contain the address of the code where the call
1610 to the current function occurred. */
1611 ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
1612 0, hard_frame_pointer_rtx);
1613 return_val_rtx = copy_to_reg (ret_val);
1614
1615 /* Get the address we need to use to determine what exception
1616 handler should be invoked, and store it in __eh_pc. */
1617 return_val_rtx = eh_outer_context (return_val_rtx);
1618 emit_move_insn (eh_saved_pc_rtx, return_val_rtx);
1619
1620 /* Either set things up so we do a return directly to __throw, or
1621 we return here instead. */
1622 #ifdef JUMP_TO_THROW
1623 emit_move_insn (ret_val, throw_libfunc);
1624 #else
1625 label = gen_label_rtx ();
1626 emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
1627 #endif
1628
1629 #ifdef RETURN_ADDR_OFFSET
1630 return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
1631 if (return_val_rtx != ret_val)
1632 emit_move_insn (ret_val, return_val_rtx);
1633 #endif
1634
1635 end = gen_label_rtx ();
1636 emit_jump (end);
1637
1638 RTL_EXPR_SEQUENCE (expr) = get_insns ();
1639 end_sequence ();
1640
1641 expand_eh_region_end (expr);
1642
1643 emit_jump (end);
1644
1645 #ifndef JUMP_TO_THROW
1646 emit_label (label);
1647 emit_throw ();
1648 #endif
1649
1650 expand_leftover_cleanups ();
1651
1652 emit_label (end);
1653
1654 #ifdef HAVE_return
1655 if (HAVE_return)
1656 {
1657 emit_jump_insn (gen_return ());
1658 emit_barrier ();
1659 }
1660 #endif
1661 }
1662
1663 /* If necessary, emit insns for the per function unwinder for the
1664 current function. Called after all the code that needs unwind
1665 protection is output.
1666
1667 The unwinder takes care of catching any exceptions that have not
1668 been previously caught within the function, unwinding the stack to
1669 the next frame, and rethrowing using the address of the current
1670 function's caller as the context of the throw.
1671
1672 On some platforms __throw can do this by itself (or with the help
1673 of __unwind_function) so the per-function unwinder is
1674 unnecessary.
1675
1676 We cannot place the unwinder into the function until after we know
1677 we are done inlining, as we don't want to have more than one
1678 unwinder per non-inlined function. */
1679
void
emit_unwinder ()
{
  rtx insns, insn;

  start_sequence ();
  start_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* We place the start of the exception region associated with the
     per function unwinder at the top of the function.  */
  if (insns)
    emit_insns_after (insns, get_insns ());

  start_sequence ();
  end_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* And we place the end of the exception region before the USE and
     CLOBBER insns that may come at the end of the function.  */
  if (insns == 0)
    return;

  /* Walk backward past trailing notes and USE/CLOBBER insns.  */
  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* If control already cannot fall into the unwinder (a barrier
     precedes a label here), insert before the label; otherwise jump
     around the unwinder so normal flow skips it.  */
  if (GET_CODE (insn) == CODE_LABEL
      && GET_CODE (PREV_INSN (insn)) == BARRIER)
    {
      insn = PREV_INSN (insn);
    }
  else
    {
      rtx label = gen_label_rtx ();
      emit_label_after (label, insn);
      insn = emit_jump_insn_after (gen_jump (label), insn);
      insn = emit_barrier_after (insn);
    }

  emit_insns_after (insns, insn);
}
1727
1728 /* Scan the current insns and build a list of handler labels. The
1729 resulting list is placed in the global variable exception_handler_labels.
1730
1731 It is called after the last exception handling region is added to
1732 the current function (when the rtl is almost all built for the
1733 current function) and before the jump optimization pass. */
1734
1735 void
1736 find_exception_handler_labels ()
1737 {
1738 rtx insn;
1739 int max_labelno = max_label_num ();
1740 int min_labelno = get_first_label_num ();
1741 rtx *labels;
1742
1743 exception_handler_labels = NULL_RTX;
1744
1745 /* If we aren't doing exception handling, there isn't much to check. */
1746 if (! doing_eh (0))
1747 return;
1748
1749 /* Generate a handy reference to each label. */
1750
1751 /* We call xmalloc here instead of alloca; we did the latter in the past,
1752 but found that it can sometimes end up being asked to allocate space
1753 for more than 1 million labels. */
1754 labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
1755 bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
1756
1757 /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER. */
1758 labels -= min_labelno;
1759
1760 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1761 {
1762 if (GET_CODE (insn) == CODE_LABEL)
1763 if (CODE_LABEL_NUMBER (insn) >= min_labelno
1764 && CODE_LABEL_NUMBER (insn) < max_labelno)
1765 labels[CODE_LABEL_NUMBER (insn)] = insn;
1766 }
1767
1768 /* For each start of a region, add its label to the list. */
1769
1770 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1771 {
1772 if (GET_CODE (insn) == NOTE
1773 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1774 {
1775 rtx label = NULL_RTX;
1776
1777 if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
1778 && NOTE_BLOCK_NUMBER (insn) < max_labelno)
1779 {
1780 label = labels[NOTE_BLOCK_NUMBER (insn)];
1781
1782 if (label)
1783 exception_handler_labels
1784 = gen_rtx (EXPR_LIST, VOIDmode,
1785 label, exception_handler_labels);
1786 else
1787 warning ("didn't find handler for EH region %d",
1788 NOTE_BLOCK_NUMBER (insn));
1789 }
1790 else
1791 warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
1792 }
1793 }
1794 }
1795
1796 /* Perform sanity checking on the exception_handler_labels list.
1797
1798 Can be called after find_exception_handler_labels is called to
1799 build the list of exception handlers for the current function and
1800 before we finish processing the current function. */
1801
void
check_exception_handler_labels ()
{
  rtx insn, handler;

  /* If we aren't doing exception handling, there isn't much to check.  */
  if (! doing_eh (0))
    return;

  /* Ensure that the CODE_LABEL_NUMBER for the CODE_LABEL entry point
     in each handler corresponds to the CODE_LABEL_NUMBER of the
     handler.  (This is a sanity pass: it only issues warnings, it
     never repairs anything.)  */

  for (handler = exception_handler_labels;
       handler;
       handler = XEXP (handler, 1))
    {
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    {
	      if (CODE_LABEL_NUMBER (insn)
		  == CODE_LABEL_NUMBER (XEXP (handler, 0)))
		{
		  /* Same number but a different rtx: duplicate label
		     numbering.  */
		  if (insn != XEXP (handler, 0))
		    warning ("mismatched handler %d",
			     CODE_LABEL_NUMBER (insn));
		  break;
		}
	    }
	}
      if (insn == NULL_RTX)
	warning ("handler not found %d",
		 CODE_LABEL_NUMBER (XEXP (handler, 0)));
    }

  /* Now go through and make sure that for each region there is a
     corresponding label.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	{
	  for (handler = exception_handler_labels;
	       handler;
	       handler = XEXP (handler, 1))
	    {
	      if (CODE_LABEL_NUMBER (XEXP (handler, 0))
		  == NOTE_BLOCK_NUMBER (insn))
		break;
	    }
	  if (handler == NULL_RTX)
	    warning ("region exists, no handler %d",
		     NOTE_BLOCK_NUMBER (insn));
	}
    }
}
1860 \f
1861 /* This group of functions initializes the exception handling data
1862 structures at the start of the compilation, initializes the data
1863 structures at the start of a function, and saves and restores the
1864 exception handling data structures for the start/end of a nested
1865 function. */
1866
1867 /* Toplevel initialization for EH things. */
1868
1869 void
1870 init_eh ()
1871 {
1872 /* Generate rtl to reference the variable in which the PC of the
1873 current context is saved. */
1874 tree type = build_pointer_type (make_node (VOID_TYPE));
1875
1876 eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
1877 DECL_EXTERNAL (eh_saved_pc) = 1;
1878 TREE_PUBLIC (eh_saved_pc) = 1;
1879 make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
1880 eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
1881 }
1882
1883 /* Initialize the per-function EH information. */
1884
1885 void
1886 init_eh_for_function ()
1887 {
1888 ehstack.top = 0;
1889 ehqueue.head = ehqueue.tail = 0;
1890 catch_clauses = NULL_RTX;
1891 false_label_stack = 0;
1892 caught_return_label_stack = 0;
1893 protect_list = NULL_TREE;
1894 current_function_dhc = NULL_RTX;
1895 current_function_dcc = NULL_RTX;
1896 }
1897
1898 /* Save some of the per-function EH info into the save area denoted by
1899 P.
1900
1901 This is currently called from save_stmt_status. */
1902
1903 void
1904 save_eh_status (p)
1905 struct function *p;
1906 {
1907 assert (p != NULL);
1908
1909 p->ehstack = ehstack;
1910 p->ehqueue = ehqueue;
1911 p->catch_clauses = catch_clauses;
1912 p->false_label_stack = false_label_stack;
1913 p->caught_return_label_stack = caught_return_label_stack;
1914 p->protect_list = protect_list;
1915 p->dhc = current_function_dhc;
1916 p->dcc = current_function_dcc;
1917
1918 init_eh ();
1919 }
1920
1921 /* Restore the per-function EH info saved into the area denoted by P.
1922
1923 This is currently called from restore_stmt_status. */
1924
1925 void
1926 restore_eh_status (p)
1927 struct function *p;
1928 {
1929 assert (p != NULL);
1930
1931 protect_list = p->protect_list;
1932 caught_return_label_stack = p->caught_return_label_stack;
1933 false_label_stack = p->false_label_stack;
1934 catch_clauses = p->catch_clauses;
1935 ehqueue = p->ehqueue;
1936 ehstack = p->ehstack;
1937 current_function_dhc = p->dhc;
1938 current_function_dcc = p->dcc;
1939 }
1940 \f
1941 /* This section is for the exception handling specific optimization
1942 pass. First are the internal routines, and then the main
1943 optimization pass. */
1944
1945 /* Determine if the given INSN can throw an exception. */
1946
1947 static int
1948 can_throw (insn)
1949 rtx insn;
1950 {
1951 /* Calls can always potentially throw exceptions. */
1952 if (GET_CODE (insn) == CALL_INSN)
1953 return 1;
1954
1955 if (asynchronous_exceptions)
1956 {
1957 /* If we wanted asynchronous exceptions, then everything but NOTEs
1958 and CODE_LABELs could throw. */
1959 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
1960 return 1;
1961 }
1962
1963 return 0;
1964 }
1965
/* Scan an exception region looking for the matching end and then
1967 remove it if possible. INSN is the start of the region, N is the
1968 region number, and DELETE_OUTER is to note if anything in this
1969 region can throw.
1970
1971 Regions are removed if they cannot possibly catch an exception.
1972 This is determined by invoking can_throw on each insn within the
1973 region; if can_throw returns true for any of the instructions, the
1974 region can catch an exception, since there is an insn within the
1975 region that is capable of throwing an exception.
1976
1977 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
1978 calls abort if it can't find one.
1979
1980 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEGIN, or if N doesn't
1981 correspond to the region number, or if DELETE_OUTER is NULL. */
1982
1983 static rtx
1984 scan_region (insn, n, delete_outer)
1985 rtx insn;
1986 int n;
1987 int *delete_outer;
1988 {
1989 rtx start = insn;
1990
1991 /* Assume we can delete the region. */
1992 int delete = 1;
1993
1994 assert (insn != NULL_RTX
1995 && GET_CODE (insn) == NOTE
1996 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1997 && NOTE_BLOCK_NUMBER (insn) == n
1998 && delete_outer != NULL);
1999
2000 insn = NEXT_INSN (insn);
2001
2002 /* Look for the matching end. */
2003 while (! (GET_CODE (insn) == NOTE
2004 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2005 {
2006 /* If anything can throw, we can't remove the region. */
2007 if (delete && can_throw (insn))
2008 {
2009 delete = 0;
2010 }
2011
2012 /* Watch out for and handle nested regions. */
2013 if (GET_CODE (insn) == NOTE
2014 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2015 {
2016 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
2017 }
2018
2019 insn = NEXT_INSN (insn);
2020 }
2021
2022 /* The _BEG/_END NOTEs must match and nest. */
2023 if (NOTE_BLOCK_NUMBER (insn) != n)
2024 abort ();
2025
2026 /* If anything in this exception region can throw, we can throw. */
2027 if (! delete)
2028 *delete_outer = 0;
2029 else
2030 {
2031 /* Delete the start and end of the region. */
2032 delete_insn (start);
2033 delete_insn (insn);
2034
2035 /* Only do this part if we have built the exception handler
2036 labels. */
2037 if (exception_handler_labels)
2038 {
2039 rtx x, *prev = &exception_handler_labels;
2040
2041 /* Find it in the list of handlers. */
2042 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2043 {
2044 rtx label = XEXP (x, 0);
2045 if (CODE_LABEL_NUMBER (label) == n)
2046 {
2047 /* If we are the last reference to the handler,
2048 delete it. */
2049 if (--LABEL_NUSES (label) == 0)
2050 delete_insn (label);
2051
2052 if (optimize)
2053 {
2054 /* Remove it from the list of exception handler
2055 labels, if we are optimizing. If we are not, then
2056 leave it in the list, as we are not really going to
2057 remove the region. */
2058 *prev = XEXP (x, 1);
2059 XEXP (x, 1) = 0;
2060 XEXP (x, 0) = 0;
2061 }
2062
2063 break;
2064 }
2065 prev = &XEXP (x, 1);
2066 }
2067 }
2068 }
2069 return insn;
2070 }
2071
2072 /* Perform various interesting optimizations for exception handling
2073 code.
2074
2075 We look for empty exception regions and make them go (away). The
2076 jump optimization code will remove the handler if nothing else uses
2077 it. */
2078
2079 void
2080 exception_optimize ()
2081 {
2082 rtx insn, regions = NULL_RTX;
2083 int n;
2084
2085 /* The below doesn't apply to setjmp/longjmp EH. */
2086 if (exceptions_via_longjmp)
2087 return;
2088
2089 /* Remove empty regions. */
2090 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2091 {
2092 if (GET_CODE (insn) == NOTE
2093 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2094 {
2095 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2096 insn, we will indirectly skip through all the insns
2097 inbetween. We are also guaranteed that the value of insn
2098 returned will be valid, as otherwise scan_region won't
2099 return. */
2100 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2101 }
2102 }
2103 }
This page took 0.177126 seconds and 6 git commands to generate.