]> gcc.gnu.org Git - gcc.git/blob - gcc/except.c
except.c (get_dynamic_handler_chain): Only make the call once per function.
[gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 92-96, 1997 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
26 be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurs without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46 There are two major codegen options for exception handling. The
47 flag -fsjlj-exceptions can be used to select the setjmp/longjmp
48 approach, which is the default. -fno-sjlj-exceptions can be used to
49 get the PC range table approach. While this is a compile time
50 flag, an entire application must be compiled with the same codegen
51 option. The first is a PC range table approach, the second is a
52 setjmp/longjmp based scheme. We will first discuss the PC range
53 table approach, after that, we will discuss the setjmp/longjmp
54 based approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61 Regions of code within a function can be marked such that if it
62 contains the context of a throw, control will be passed to a
63 designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
74 In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
89
90 If the target does not use the DWARF 2 frame unwind information, at
91 program startup each object file invokes a function named
92 __register_exceptions with the address of its local
93 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94 is responsible for recording all of the exception regions into one list
95 (which is kept in a static variable named exception_table_list).
96
97 On targets that support crtstuff.c, the unwind information
98 is stored in a section named .eh_frame and the information for the
99 entire shared object or program is registered with a call to
100 __register_frame. On other targets, the information for each
101 translation unit is registered from the file generated by collect2.
102 __register_frame is defined in frame.c, and is responsible for
103 recording all of the unwind regions into one list (which is kept in a
104 static variable named unwind_table_list).
105
106 The function __throw is actually responsible for doing the
107 throw. On machines that have unwind info support, __throw is generated
108 by code in libgcc2.c, otherwise __throw is generated on a
109 per-object-file basis for each source file compiled with
110 -fexceptions by the C++ frontend. Before __throw is invoked,
111 the current context of the throw needs to be placed in the global
112 variable __eh_pc.
113
114 __throw attempts to find the appropriate exception handler for the
115 PC value stored in __eh_pc by calling __find_first_exception_table_match
116 (which is defined in libgcc2.c). If __find_first_exception_table_match
117 finds a relevant handler, __throw transfers control directly to it.
118
119 If a handler for the context being thrown from can't be found, __throw
120 walks (see Walking the stack below) the stack up the dynamic call chain to
121 continue searching for an appropriate exception handler based upon the
122 caller of the function it last sought an exception handler for. It stops
123 when either an exception handler is found, or when the top of the
124 call chain is reached.
125
126 If no handler is found, an external library function named
127 __terminate is called. If a handler is found, then we restart
128 our search for a handler at the end of the call chain, and repeat
129 the search process, but instead of just walking up the call chain,
130 we unwind the call chain as we walk up it.
131
132 Internal implementation details:
133
134 To associate a user-defined handler with a block of statements, the
135 function expand_start_try_stmts is used to mark the start of the
136 block of statements with which the handler is to be associated
137 (which is known as a "try block"). All statements that appear
138 afterwards will be associated with the try block.
139
140 A call to expand_start_all_catch marks the end of the try block,
141 and also marks the start of the "catch block" (the user-defined
142 handler) associated with the try block.
143
144 This user-defined handler will be invoked for *every* exception
145 thrown with the context of the try block. It is up to the handler
146 to decide whether or not it wishes to handle any given exception,
147 as there is currently no mechanism in this implementation for doing
148 this. (There are plans for conditionally processing an exception
149 based on its "type", which will provide a language-independent
150 mechanism).
151
152 If the handler chooses not to process the exception (perhaps by
153 looking at an "exception type" or some other additional data
154 supplied with the exception), it can fall through to the end of the
155 handler. expand_end_all_catch and expand_leftover_cleanups
156 add additional code to the end of each handler to take care of
157 rethrowing to the outer exception handler.
158
159 The handler also has the option to continue with "normal flow of
160 code", or in other words to resume executing at the statement
161 immediately after the end of the exception region. The variable
162 caught_return_label_stack contains a stack of labels, and jumping
163 to the topmost entry's label via expand_goto will resume normal
164 flow to the statement immediately after the end of the exception
165 region. If the handler falls through to the end, the exception will
166 be rethrown to the outer exception region.
167
168 The instructions for the catch block are kept as a separate
169 sequence, and will be emitted at the end of the function along with
170 the handlers specified via expand_eh_region_end. The end of the
171 catch block is marked with expand_end_all_catch.
172
173 Any data associated with the exception must currently be handled by
174 some external mechanism maintained in the frontend. For example,
175 the C++ exception mechanism passes an arbitrary value along with
176 the exception, and this is handled in the C++ frontend by using a
177 global variable to hold the value. (This will be changing in the
178 future.)
179
180 The mechanism in C++ for handling data associated with the
181 exception is clearly not thread-safe. For a thread-based
182 environment, another mechanism must be used (possibly using a
183 per-thread allocation mechanism if the size of the area that needs
184 to be allocated isn't known at compile time.)
185
186 Internally-generated exception regions (cleanups) are marked by
187 calling expand_eh_region_start to mark the start of the region,
188 and expand_eh_region_end (handler) is used to both designate the
189 end of the region and to associate a specified handler/cleanup with
190 the region. The rtl code in HANDLER will be invoked whenever an
191 exception occurs in the region between the calls to
192 expand_eh_region_start and expand_eh_region_end. After HANDLER is
193 executed, additional code is emitted to handle rethrowing the
194 exception to the outer exception handler. The code for HANDLER will
195 be emitted at the end of the function.
196
197 TARGET_EXPRs can also be used to designate exception regions. A
198 TARGET_EXPR gives an unwind-protect style interface commonly used
199 in functional languages such as LISP. The associated expression is
200 evaluated, and whether or not it (or any of the functions that it
201 calls) throws an exception, the protect expression is always
202 invoked. This implementation takes care of the details of
203 associating an exception table entry with the expression and
204 generating the necessary code (it actually emits the protect
205 expression twice, once for normal flow and once for the exception
206 case). As for the other handlers, the code for the exception case
207 will be emitted at the end of the function.
208
209 Cleanups can also be specified by using add_partial_entry (handler)
210 and end_protect_partials. add_partial_entry creates the start of
211 a new exception region; HANDLER will be invoked if an exception is
212 thrown with the context of the region between the calls to
213 add_partial_entry and end_protect_partials. end_protect_partials is
214 used to mark the end of these regions. add_partial_entry can be
215 called as many times as needed before calling end_protect_partials.
216 However, end_protect_partials should only be invoked once for each
217 group of calls to add_partial_entry as the entries are queued
218 and all of the outstanding entries are processed simultaneously
219 when end_protect_partials is invoked. Similarly to the other
220 handlers, the code for HANDLER will be emitted at the end of the
221 function.
222
223 The generated RTL for an exception region includes
224 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225 the start and end of the exception region. A unique label is also
226 generated at the start of the exception region, which is available
227 by looking at the ehstack variable. The topmost entry corresponds
228 to the current region.
229
230 In the current implementation, an exception can only be thrown from
231 a function call (since the mechanism used to actually throw an
232 exception involves calling __throw). If an exception region is
233 created but no function calls occur within that region, the region
234 can be safely optimized away (along with its exception handlers)
235 since no exceptions can ever be caught in that region. This
236 optimization is performed unless -fasynchronous-exceptions is
237 given. If the user wishes to throw from a signal handler, or other
238 asynchronous place, -fasynchronous-exceptions should be used when
239 compiling for maximally correct code, at the cost of additional
240 exception regions. Using -fasynchronous-exceptions only produces
241 code that is reasonably safe in such situations, but a correct
242 program cannot rely upon this working. It can be used in failsafe
243 code, where trying to continue on, and proceeding with potentially
244 incorrect results is better than halting the program.
245
246
247 Walking the stack:
248
249 The stack is walked by starting with a pointer to the current
250 frame, and finding the pointer to the callers frame. The unwind info
251 tells __throw how to find it.
252
253 Unwinding the stack:
254
255 When we use the term unwinding the stack, we mean undoing the
256 effects of the function prologue in a controlled fashion so that we
257 still have the flow of control. Otherwise, we could just return
258 (jump to the normal end of function epilogue).
259
260 This is done in __throw in libgcc2.c when we know that a handler exists
261 in a frame higher up the call stack than its immediate caller.
262
263 To unwind, we find the unwind data associated with the frame, if any.
264 If we don't find any, we call the library routine __terminate. If we do
265 find it, we use the information to copy the saved register values from
266 that frame into the register save area in the frame for __throw, return
267 into a stub which updates the stack pointer, and jump to the handler.
268 The normal function epilogue for __throw handles restoring the saved
269 values into registers.
270
271 When unwinding, we use this method if we know it will
272 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
273 an inline unwinder will have been emitted for any function that
274 __unwind_function cannot unwind. The inline unwinder appears as a
275 normal exception handler for the entire function, for any function
276 that we know cannot be unwound by __unwind_function. We inform the
277 compiler of whether a function can be unwound with
278 __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
279 when the unwinder isn't needed. __unwind_function is used as an
280 action of last resort. If no other method can be used for
281 unwinding, __unwind_function is used. If it cannot unwind, it
282 should call __terminate.
283
284 By default, if the target-specific backend doesn't supply a definition
285 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286 unwinders will be used instead. The main tradeoff here is in text space
287 utilization. Obviously, if inline unwinders have to be generated
288 repeatedly, this uses much more space than if a single routine is used.
289
290 However, it is simply not possible on some platforms to write a
291 generalized routine for doing stack unwinding without having some
292 form of additional data associated with each function. The current
293 implementation can encode this data in the form of additional
294 machine instructions or as static data in tabular form. The latter
295 is called the unwind data.
296
297 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299 defined and has a non-zero value, a per-function unwinder is not emitted
300 for the current function. If the static unwind data is supported, then
301 a per-function unwinder is not emitted.
302
303 On some platforms it is possible that neither __unwind_function
304 nor inlined unwinders are available. For these platforms it is not
305 possible to throw through a function call, and abort will be
306 invoked instead of performing the throw.
307
308 The reason the unwind data may be needed is that on some platforms
309 the order and types of data stored on the stack can vary depending
310 on the type of function, its arguments and returned values, and the
311 compilation options used (optimization versus non-optimization,
312 -fomit-frame-pointer, processor variations, etc).
313
314 Unfortunately, this also means that throwing through functions that
315 aren't compiled with exception handling support will still not be
316 possible on some platforms. This problem is currently being
317 investigated, but no solutions have been found that do not imply
318 some unacceptable performance penalties.
319
320 Future directions:
321
322 Currently __throw makes no differentiation between cleanups and
323 user-defined exception regions. While this makes the implementation
324 simple, it also implies that it is impossible to determine if a
325 user-defined exception handler exists for a given exception without
326 completely unwinding the stack in the process. This is undesirable
327 from the standpoint of debugging, as ideally it would be possible
328 to trap unhandled exceptions in the debugger before the process of
329 unwinding has even started.
330
331 This problem can be solved by marking user-defined handlers in a
332 special way (probably by adding additional bits to exception_table_list).
333 A two-pass scheme could then be used by __throw to iterate
334 through the table. The first pass would search for a relevant
335 user-defined handler for the current context of the throw, and if
336 one is found, the second pass would then invoke all needed cleanups
337 before jumping to the user-defined handler.
338
339 Many languages (including C++ and Ada) make execution of a
340 user-defined handler conditional on the "type" of the exception
341 thrown. (The type of the exception is actually the type of the data
342 that is thrown with the exception.) It will thus be necessary for
343 __throw to be able to determine if a given user-defined
344 exception handler will actually be executed, given the type of
345 exception.
346
347 One scheme is to add additional information to exception_table_list
348 as to the types of exceptions accepted by each handler. __throw
349 can do the type comparisons and then determine if the handler is
350 actually going to be executed.
351
352 There is currently no significant level of debugging support
353 available, other than to place a breakpoint on __throw. While
354 this is sufficient in most cases, it would be helpful to be able to
355 know where a given exception was going to be thrown to before it is
356 actually thrown, and to be able to choose between stopping before
357 every exception region (including cleanups), or just user-defined
358 exception regions. This should be possible to do in the two-pass
359 scheme by adding additional labels to __throw for appropriate
360 breakpoints, and additional debugger commands could be added to
361 query various state variables to determine what actions are to be
362 performed next.
363
364 Another major problem that is being worked on is the issue with stack
365 unwinding on various platforms. Currently the only platforms that have
366 support for the generation of a generic unwinder are the SPARC and MIPS.
367 All other ports require per-function unwinders, which produce large
368 amounts of code bloat.
369
370 For setjmp/longjmp based exception handling, some of the details
371 are as above, but there are some additional details. This section
372 discusses the details.
373
374 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
375 optimize EH regions yet. We don't have to worry about machine
376 specific issues with unwinding the stack, as we rely upon longjmp
377 for all the machine specific details. There is no variable context
378 of a throw, just the one implied by the dynamic handler stack
379 pointed to by the dynamic handler chain. There is no exception
380 table, and no calls to __register_exceptions. __sjthrow is used
381 instead of __throw, and it works by using the dynamic handler
382 chain, and longjmp. -fasynchronous-exceptions has no effect, as
383 the elimination of trivial exception regions is not yet performed.
384
385 A frontend can set protect_cleanup_actions_with_terminate when all
386 the cleanup actions should be protected with an EH region that
387 calls terminate when an unhandled exception is thrown. C++ does
388 this, Ada does not. */
389
390
391 #include "config.h"
392 #include "defaults.h"
393 #include <stdio.h>
394 #include "rtl.h"
395 #include "tree.h"
396 #include "flags.h"
397 #include "except.h"
398 #include "function.h"
399 #include "insn-flags.h"
400 #include "expr.h"
401 #include "insn-codes.h"
402 #include "regs.h"
403 #include "hard-reg-set.h"
404 #include "insn-config.h"
405 #include "recog.h"
406 #include "output.h"
407
/* One to use setjmp/longjmp method of generating code for exception
   handling.
   NOTE(review): initialized to 2, which is neither of the documented
   0/1 values -- presumably a "not yet decided" sentinel overwritten
   during option processing; confirm against the option-handling code.  */

int exceptions_via_longjmp = 2;

/* One to enable asynchronous exception support.  */

int asynchronous_exceptions = 0;

/* One to protect cleanup actions with a handler that calls
   __terminate, zero otherwise.  */

int protect_cleanup_actions_with_terminate;

/* A list of labels used for exception handlers.  Created by
   find_exception_handler_labels for the optimization passes.  */

rtx exception_handler_labels;

/* Nonzero means that __throw was invoked.

   This is used by the C++ frontend to know if code needs to be emitted
   for __throw or not.  */

int throw_used;

/* The dynamic handler chain.  Nonzero if the function has already
   fetched a pointer to the dynamic handler chain for exception
   handling.  Cached per function; see get_dynamic_handler_chain.  */

rtx current_function_dhc;

/* The dynamic cleanup chain.  Nonzero if the function has already
   fetched a pointer to the dynamic cleanup chain for exception
   handling.  */

rtx current_function_dcc;

/* A stack used for keeping track of the currently active exception
   handling region.  As each exception region is started, an entry
   describing the region is pushed onto this stack.  The current
   region can be found by looking at the top of the stack, and as we
   exit regions, the corresponding entries are popped. 

   Entries cannot overlap; they can be nested. So there is only one
   entry at most that corresponds to the current instruction, and that
   is the entry on the top of the stack.  */

static struct eh_stack ehstack;

/* A queue used for tracking which exception regions have closed but
   whose handlers have not yet been expanded.  Regions are emitted in
   groups in an attempt to improve paging performance.

   As we exit a region, we enqueue a new entry.  The entries are then
   dequeued during expand_leftover_cleanups and expand_start_all_catch.

   We should redo things so that we either take RTL for the handler,
   or we expand the handler expressed as a tree immediately at region
   end time.  */

static struct eh_queue ehqueue;

/* Insns for all of the exception handlers for the current function.
   They are currently emitted by the frontend code.  */

rtx catch_clauses;

/* A TREE_CHAINed list of handlers for regions that are not yet
   closed. The TREE_VALUE of each entry contains the handler for the
   corresponding entry on the ehstack.  */

static tree protect_list;

/* Stacks to keep track of various labels.  */

/* Keeps track of the label to resume to should one want to resume
   normal control flow out of a handler (instead of, say, returning to
   the caller of the current function or exiting the program).  */

struct label_node *caught_return_label_stack = NULL;

/* A random data area for the front end's own use.  */

struct label_node *false_label_stack = NULL;

#ifndef DWARF2_UNWIND_INFO
/* The rtx and the tree for the saved PC value.  */

rtx eh_saved_pc_rtx;
tree eh_saved_pc;
#endif

rtx expand_builtin_return_addr	PROTO((enum built_in_function, int, rtx));
static void expand_rethrow	PROTO((rtx));
503
504 \f
505 /* Various support routines to manipulate the various data structures
506 used by the exception handling code. */
507
508 /* Push a label entry onto the given STACK. */
509
510 void
511 push_label_entry (stack, rlabel, tlabel)
512 struct label_node **stack;
513 rtx rlabel;
514 tree tlabel;
515 {
516 struct label_node *newnode
517 = (struct label_node *) xmalloc (sizeof (struct label_node));
518
519 if (rlabel)
520 newnode->u.rlabel = rlabel;
521 else
522 newnode->u.tlabel = tlabel;
523 newnode->chain = *stack;
524 *stack = newnode;
525 }
526
527 /* Pop a label entry from the given STACK. */
528
529 rtx
530 pop_label_entry (stack)
531 struct label_node **stack;
532 {
533 rtx label;
534 struct label_node *tempnode;
535
536 if (! *stack)
537 return NULL_RTX;
538
539 tempnode = *stack;
540 label = tempnode->u.rlabel;
541 *stack = (*stack)->chain;
542 free (tempnode);
543
544 return label;
545 }
546
547 /* Return the top element of the given STACK. */
548
549 tree
550 top_label_entry (stack)
551 struct label_node **stack;
552 {
553 if (! *stack)
554 return NULL_TREE;
555
556 return (*stack)->u.tlabel;
557 }
558
559 /* Make a copy of ENTRY using xmalloc to allocate the space. */
560
561 static struct eh_entry *
562 copy_eh_entry (entry)
563 struct eh_entry *entry;
564 {
565 struct eh_entry *newentry;
566
567 newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
568 bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
569
570 return newentry;
571 }
572
573 /* Push a new eh_node entry onto STACK. */
574
575 static void
576 push_eh_entry (stack)
577 struct eh_stack *stack;
578 {
579 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
580 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
581
582 entry->outer_context = gen_label_rtx ();
583 entry->exception_handler_label = gen_label_rtx ();
584 entry->finalization = NULL_TREE;
585
586 node->entry = entry;
587 node->chain = stack->top;
588 stack->top = node;
589 }
590
591 /* Pop an entry from the given STACK. */
592
593 static struct eh_entry *
594 pop_eh_entry (stack)
595 struct eh_stack *stack;
596 {
597 struct eh_node *tempnode;
598 struct eh_entry *tempentry;
599
600 tempnode = stack->top;
601 tempentry = tempnode->entry;
602 stack->top = stack->top->chain;
603 free (tempnode);
604
605 return tempentry;
606 }
607
608 /* Enqueue an ENTRY onto the given QUEUE. */
609
610 static void
611 enqueue_eh_entry (queue, entry)
612 struct eh_queue *queue;
613 struct eh_entry *entry;
614 {
615 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
616
617 node->entry = entry;
618 node->chain = NULL;
619
620 if (queue->head == NULL)
621 {
622 queue->head = node;
623 }
624 else
625 {
626 queue->tail->chain = node;
627 }
628 queue->tail = node;
629 }
630
631 /* Dequeue an entry from the given QUEUE. */
632
633 static struct eh_entry *
634 dequeue_eh_entry (queue)
635 struct eh_queue *queue;
636 {
637 struct eh_node *tempnode;
638 struct eh_entry *tempentry;
639
640 if (queue->head == NULL)
641 return NULL;
642
643 tempnode = queue->head;
644 queue->head = queue->head->chain;
645
646 tempentry = tempnode->entry;
647 free (tempnode);
648
649 return tempentry;
650 }
651 \f
652 /* Routine to see if exception exception handling is turned on.
653 DO_WARN is non-zero if we want to inform the user that exception
654 handling is turned off.
655
656 This is used to ensure that -fexceptions has been specified if the
657 compiler tries to use any exception-specific functions. */
658
659 int
660 doing_eh (do_warn)
661 int do_warn;
662 {
663 if (! flag_exceptions)
664 {
665 static int warned = 0;
666 if (! warned && do_warn)
667 {
668 error ("exception handling disabled, use -fexceptions to enable");
669 warned = 1;
670 }
671 return 0;
672 }
673 return 1;
674 }
675
/* Given a return address in ADDR, determine the address we should use
   to find the corresponding EH region.  Both adjustments are
   controlled by optional target macros, so on many targets this is
   the identity function.  */

rtx
eh_outer_context (addr)
     rtx addr;
{
  /* First mask out any unwanted bits (e.g. low-order mode bits some
     targets keep in return addresses).  */
#ifdef MASK_RETURN_ADDR
  expand_and (addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
695
/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list. All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* Make sure the entry is on the correct obstack.  The matching
     pop_obstacks below restores the previous allocation state.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();

  /* Because this is a cleanup action, we may have to protect the handler
     with __terminate.  */
  handler = protect_with_terminate (handler);

  /* Record the (possibly wrapped) handler; end_protect_partials will
     consume this list.  */
  protect_list = tree_cons (NULL_TREE, handler, protect_list);
  pop_obstacks ();
}
718
/* Get a reference to the dynamic handler chain.  It points to the
   pointer to the next element in the dynamic handler chain.  It ends
   when there are no more elements in the dynamic handler chain, when
   the value is &top_elt from libgcc2.c.  Immediately after the
   pointer, is an area suitable for setjmp/longjmp when
   DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
   __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
   isn't defined.

   This routine is here to facilitate the porting of this code to
   systems with threads.  One can either replace the routine we emit a
   call for here in libgcc2.c, or one can modify this routine to work
   with their thread system.

   Ideally, we really only want one per real function, not one
   per inlined function.  */

rtx
get_dynamic_handler_chain ()
{
  /* The FUNCTION_DECL for __get_dynamic_handler_chain is built once
     and shared by all functions in this translation unit.  */
  static tree fn;
  tree expr;
  rtx insns;

  /* Only make the call once per function; the resulting rtx is cached
     in current_function_dhc.  */
  if (current_function_dhc)
    return current_function_dhc;

  if (fn == NULL_TREE)
    {
      tree fntype;
      fn = get_identifier ("__get_dynamic_handler_chain");
      /* Build the declaration on the permanent obstack so it survives
	 the current function.  */
      push_obstacks_nochange ();
      end_temporary_allocation ();
      /* The libgcc routine returns void *** -- a pointer to the chain
	 pointer.  */
      fntype = build_pointer_type (build_pointer_type
				   (build_pointer_type (void_type_node)));
      fntype = build_function_type (fntype, NULL_TREE);
      fn = build_decl (FUNCTION_DECL, fn, fntype);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_READONLY (fn) = 1;
      make_decl_rtl (fn, NULL_PTR, 1);
      assemble_external (fn);
      pop_obstacks ();
    }

  /* Build the tree for (*__get_dynamic_handler_chain ()).  */
  expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		expr, NULL_TREE, NULL_TREE);
  TREE_SIDE_EFFECTS (expr) = 1;
  expr = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (expr)), expr);

  /* Expand the call in a separate sequence and emit it right after
     parameter setup, so the cached value is computed once at function
     entry and is valid everywhere in the function.  */
  start_sequence ();
  current_function_dhc = expand_expr (expr, NULL_RTX, VOIDmode, 0);
  insns = get_insns ();
  end_sequence ();
  emit_insns_before (insns, get_first_nonparm_insn ());

  return current_function_dhc;
}
779
780 /* Get a reference to the dynamic cleanup chain. It points to the
781 pointer to the next element in the dynamic cleanup chain.
782 Immediately after the pointer, are two Pmode variables, one for a
783 pointer to a function that performs the cleanup action, and the
784 second, the argument to pass to that function. */
785
786 rtx
787 get_dynamic_cleanup_chain ()
788 {
789 rtx dhc, dcc;
790
791 dhc = get_dynamic_handler_chain ();
792 dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
793
794 current_function_dcc = copy_to_reg (dcc);
795
796 /* We don't want a copy of the dcc, but rather, the single dcc. */
797 return gen_rtx (MEM, Pmode, current_function_dcc);
798 }
799
800 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
801 LABEL is an rtx of code CODE_LABEL, in this function. */
802
803 void
804 jumpif_rtx (x, label)
805 rtx x;
806 rtx label;
807 {
808 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
809 }
810
811 /* Generate code to evaluate X and jump to LABEL if the value is zero.
812 LABEL is an rtx of code CODE_LABEL, in this function. */
813
814 void
815 jumpifnot_rtx (x, label)
816 rtx x;
817 rtx label;
818 {
819 jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
820 }
821
822 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
823 We just need to create an element for the cleanup list, and push it
824 into the chain.
825
826 A dynamic cleanup is a cleanup action implied by the presence of an
827 element on the EH runtime dynamic cleanup stack that is to be
828 performed when an exception is thrown. The cleanup action is
829 performed by __sjthrow when an exception is thrown. Only certain
830 actions can be optimized into dynamic cleanup actions. For the
831 restrictions on what actions can be performed using this routine,
832 see expand_eh_region_start_tree. */
833
834 static void
835 start_dynamic_cleanup (func, arg)
836 tree func;
837 tree arg;
838 {
839 rtx dhc, dcc;
840 rtx new_func, new_arg;
841 rtx x, buf;
842 int size;
843
844 /* We allocate enough room for a pointer to the function, and
845 one argument. */
846 size = 2;
847
848 /* XXX, FIXME: The stack space allocated this way is too long lived,
849 but there is no allocation routine that allocates at the level of
850 the last binding contour. */
851 buf = assign_stack_local (BLKmode,
852 GET_MODE_SIZE (Pmode)*(size+1),
853 0);
854
855 buf = change_address (buf, Pmode, NULL_RTX);
856
857 /* Store dcc into the first word of the newly allocated buffer. */
858
859 dcc = get_dynamic_cleanup_chain ();
860 emit_move_insn (buf, dcc);
861
862 /* Store func and arg into the cleanup list element. */
863
864 new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
865 GET_MODE_SIZE (Pmode)));
866 new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
867 GET_MODE_SIZE (Pmode)*2));
868 x = expand_expr (func, new_func, Pmode, 0);
869 if (x != new_func)
870 emit_move_insn (new_func, x);
871
872 x = expand_expr (arg, new_arg, Pmode, 0);
873 if (x != new_arg)
874 emit_move_insn (new_arg, x);
875
876 /* Update the cleanup chain. */
877
878 emit_move_insn (dcc, XEXP (buf, 0));
879 }
880
881 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
882 handler stack. This should only be used by expand_eh_region_start
883 or expand_eh_region_start_tree. */
884
static void
start_dynamic_handler ()
{
  rtx dhc, dcc;
  rtx x, arg, buf;
  int size;

#ifndef DONT_USE_BUILTIN_SETJMP
  /* The number of Pmode words for the setjmp buffer, when using the
     builtin setjmp/longjmp, see expand_builtin, case
     BUILT_IN_LONGJMP.  */
  size = 5;
#else
#ifdef JMP_BUF_SIZE
  size = JMP_BUF_SIZE;
#else
  /* Should be large enough for most systems, if it is not,
     JMP_BUF_SIZE should be defined with the proper value.  It will
     also tend to be larger than necessary for most systems, a more
     optimal port will define JMP_BUF_SIZE.  */
  size = FIRST_PSEUDO_REGISTER+2;
#endif
#endif
  /* XXX, FIXME: The stack space allocated this way is too long lived,
     but there is no allocation routine that allocates at the level of
     the last binding contour.  */
  arg = assign_stack_local (BLKmode,
			    GET_MODE_SIZE (Pmode)*(size+1),
			    0);

  arg = change_address (arg, Pmode, NULL_RTX);

  /* Store dhc into the first word of the newly allocated buffer.  */

  dhc = get_dynamic_handler_chain ();
  /* The dynamic cleanup chain head occupies the second word.  */
  dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
					    GET_MODE_SIZE (Pmode)));
  emit_move_insn (arg, dhc);

  /* Zero out the start of the cleanup chain.  */
  emit_move_insn (dcc, const0_rtx);

  /* The jmpbuf starts two words into the area allocated.  */
  buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);

#ifdef DONT_USE_BUILTIN_SETJMP
  x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
			       buf, Pmode);
#else
  x = expand_builtin_setjmp (buf, NULL_RTX);
#endif

  /* If we come back here for a catch, transfer control to the
     handler.  */

  jumpif_rtx (x, ehstack.top->entry->exception_handler_label);

  /* We are committed to this, so update the handler chain.  This must
     come after the setjmp so a throw before this point does not see a
     half-constructed element.  */

  emit_move_insn (dhc, XEXP (arg, 0));
}
946
947 /* Start an exception handling region for the given cleanup action.
948 All instructions emitted after this point are considered to be part
949 of the region until expand_eh_region_end is invoked. CLEANUP is
950 the cleanup action to perform. The return value is true if the
   exception region was optimized away.  In that case,
952 expand_eh_region_end does not need to be called for this cleanup,
953 nor should it be.
954
955 This routine notices one particular common case in C++ code
956 generation, and optimizes it so as to not need the exception
   region.  It works by creating a dynamic cleanup action instead of
   using an exception region. */
959
960 int
961 expand_eh_region_start_tree (decl, cleanup)
962 tree decl;
963 tree cleanup;
964 {
965 rtx note;
966
967 /* This is the old code. */
968 if (! doing_eh (0))
969 return 0;
970
971 /* The optimization only applies to actions protected with
972 terminate, and only applies if we are using the setjmp/longjmp
973 codegen method. */
974 if (exceptions_via_longjmp
975 && protect_cleanup_actions_with_terminate)
976 {
977 tree func, arg;
978 tree args;
979
980 /* Ignore any UNSAVE_EXPR. */
981 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
982 cleanup = TREE_OPERAND (cleanup, 0);
983
984 /* Further, it only applies if the action is a call, if there
985 are 2 arguments, and if the second argument is 2. */
986
987 if (TREE_CODE (cleanup) == CALL_EXPR
988 && (args = TREE_OPERAND (cleanup, 1))
989 && (func = TREE_OPERAND (cleanup, 0))
990 && (arg = TREE_VALUE (args))
991 && (args = TREE_CHAIN (args))
992
993 /* is the second argument 2? */
994 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
995 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
996 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
997
998 /* Make sure there are no other arguments. */
999 && TREE_CHAIN (args) == NULL_TREE)
1000 {
1001 /* Arrange for returns and gotos to pop the entry we make on the
1002 dynamic cleanup stack. */
1003 expand_dcc_cleanup (decl);
1004 start_dynamic_cleanup (func, arg);
1005 return 1;
1006 }
1007 }
1008
1009 expand_eh_region_start_for_decl (decl);
1010 ehstack.top->entry->finalization = cleanup;
1011
1012 return 0;
1013 }
1014
1015 /* Just like expand_eh_region_start, except if a cleanup action is
1016 entered on the cleanup chain, the TREE_PURPOSE of the element put
1017 on the chain is DECL. DECL should be the associated VAR_DECL, if
1018 any, otherwise it should be NULL_TREE. */
1019
void
expand_eh_region_start_for_decl (decl)
     tree decl;
{
  rtx note;

  /* This is the old code.  */
  if (! doing_eh (0))
    return;

  if (exceptions_via_longjmp)
    {
      /* We need a new block to record the start and end of the
	 dynamic handler chain.  We could always do this, but we
	 really want to permit jumping into such a block, and we want
	 to avoid any errors or performance impact in the SJ EH code
	 for now.  */
      expand_start_bindings (0);

      /* But we don't need or want a new temporary level.  */
      pop_temp_slots ();

      /* Mark this block as created by expand_eh_region_start.  This
	 is so that we can pop the block with expand_end_bindings
	 automatically.  */
      mark_block_as_eh_region ();

      /* Arrange for returns and gotos to pop the entry we make on the
	 dynamic handler stack.  */
      expand_dhc_cleanup (decl);
    }

  /* Open the region: push an entry and emit the begin note, tagged
     with the handler's label number so the region and handler can be
     matched up later (see find_exception_handler_labels).  */
  push_eh_entry (&ehstack);
  note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
  NOTE_BLOCK_NUMBER (note)
    = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
  if (exceptions_via_longjmp)
    start_dynamic_handler ();
}
1059
1060 /* Start an exception handling region. All instructions emitted after
1061 this point are considered to be part of the region until
1062 expand_eh_region_end is invoked. */
1063
void
expand_eh_region_start ()
{
  /* An anonymous region: same as the _for_decl entry point with no
     associated VAR_DECL.  */
  expand_eh_region_start_for_decl (NULL_TREE);
}
1069
1070 /* End an exception handling region. The information about the region
1071 is found on the top of ehstack.
1072
1073 HANDLER is either the cleanup for the exception region, or if we're
1074 marking the end of a try block, HANDLER is integer_zero_node.
1075
1076 HANDLER will be transformed to rtl when expand_leftover_cleanups
1077 is invoked. */
1078
void
expand_eh_region_end (handler)
     tree handler;
{
  struct eh_entry *entry;
  rtx note;

  if (! doing_eh (0))
    return;

  entry = pop_eh_entry (&ehstack);

  /* Close the region with an end note carrying the same label number
     as the matching begin note.  */
  note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
  NOTE_BLOCK_NUMBER (note)
    = CODE_LABEL_NUMBER (entry->exception_handler_label);
  if (exceptions_via_longjmp == 0
      /* We share outer_context between regions; only emit it once.  */
      && INSN_UID (entry->outer_context) == 0)
    {
      rtx label;

      /* Jump around the rethrow sequence for the normal-flow path.  */
      label = gen_label_rtx ();
      emit_jump (label);

      /* Emit a label marking the end of this exception region that
	 is used for rethrowing into the outer context.  */
      emit_label (entry->outer_context);
      expand_internal_throw ();

      emit_label (label);
    }

  entry->finalization = handler;

  /* Queue the entry; its handler rtl is generated later, by
     expand_leftover_cleanups or expand_start_all_catch.  */
  enqueue_eh_entry (&ehqueue, entry);

  /* If we have already started ending the bindings, don't recurse.
     This only happens when exceptions_via_longjmp is true.  */
  if (is_eh_region ())
    {
      /* Because we don't need or want a new temporary level and
	 because we didn't create one in expand_eh_region_start,
	 create a fake one now to avoid removing one in
	 expand_end_bindings.  */
      push_temp_slots ();

      mark_block_as_not_eh_region ();

      /* Maybe do this to prevent jumping in and so on...  */
      expand_end_bindings (NULL_TREE, 0, 0);
    }
}
1131
1132 /* End the EH region for a goto fixup. We only need them in the region-based
1133 EH scheme. */
1134
1135 void
1136 expand_fixup_region_start ()
1137 {
1138 if (! doing_eh (0) || exceptions_via_longjmp)
1139 return;
1140
1141 expand_eh_region_start ();
1142 }
1143
1144 /* End the EH region for a goto fixup. CLEANUP is the cleanup we just
1145 expanded; to avoid running it twice if it throws, we look through the
1146 ehqueue for a matching region and rethrow from its outer_context. */
1147
1148 void
1149 expand_fixup_region_end (cleanup)
1150 tree cleanup;
1151 {
1152 struct eh_node *node;
1153
1154 if (! doing_eh (0) || exceptions_via_longjmp)
1155 return;
1156
1157 for (node = ehstack.top; node && node->entry->finalization != cleanup; )
1158 node = node->chain;
1159 if (node == 0)
1160 for (node = ehqueue.head; node && node->entry->finalization != cleanup; )
1161 node = node->chain;
1162 if (node == 0)
1163 abort ();
1164
1165 ehstack.top->entry->outer_context = node->entry->outer_context;
1166
1167 /* Just rethrow. size_zero_node is just a NOP. */
1168 expand_eh_region_end (size_zero_node);
1169 }
1170
1171 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1172 call to __sjthrow.
1173
1174 Otherwise, we emit a call to __throw and note that we threw
1175 something, so we know we need to generate the necessary code for
1176 __throw.
1177
1178 Before invoking throw, the __eh_pc variable must have been set up
1179 to contain the PC being thrown from. This address is used by
1180 __throw to determine which exception region (if any) is
1181 responsible for handling the exception. */
1182
void
emit_throw ()
{
  if (exceptions_via_longjmp)
    {
      /* The setjmp/longjmp runtime does all the work, including any
	 dynamic cleanups, in __sjthrow.  */
      emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
    }
  else
    {
#ifdef JUMP_TO_THROW
      /* Targets that can, jump straight to __throw.  */
      emit_indirect_jump (throw_libfunc);
#else
#ifndef DWARF2_UNWIND_INFO
      /* Prevent assemble_external from doing anything with this symbol.  */
      SYMBOL_REF_USED (throw_libfunc) = 1;
#endif
      emit_library_call (throw_libfunc, 0, VOIDmode, 0);
#endif
      /* Record that __throw's support code must be generated.  */
      throw_used = 1;
    }
  /* Control never returns here.  */
  emit_barrier ();
}
1205
1206 /* Throw the current exception. If appropriate, this is done by jumping
1207 to the next handler. */
1208
void
expand_internal_throw ()
{
#ifndef DWARF2_UNWIND_INFO
  if (! exceptions_via_longjmp)
    {
      /* Record the current PC in __eh_pc so __throw can locate the
	 responsible exception region.  */
      rtx label = gen_label_rtx ();
      emit_label (label);
      label = gen_rtx (LABEL_REF, Pmode, label);
      assemble_external (eh_saved_pc);
      emit_move_insn (eh_saved_pc_rtx, label);
    }
#endif
  emit_throw ();
}
1224
1225 /* Called from expand_exception_blocks and expand_end_catch_block to
1226 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1227
void
expand_leftover_cleanups ()
{
  struct eh_entry *entry;

  /* Drain the queue of regions closed by expand_eh_region_end,
     emitting each one's handler.  */
  while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
    {
      rtx prev;

      /* A leftover try block.  Shouldn't be one here.  */
      if (entry->finalization == integer_zero_node)
	abort ();

      /* Output the label for the start of the exception handler.  */
      emit_label (entry->exception_handler_label);

#ifdef HAVE_exception_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_exception_receiver)
	  emit_insn (gen_exception_receiver ());
#endif

#ifdef HAVE_nonlocal_goto_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
#endif

      /* And now generate the insns for the handler.  */
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	/* Emit code to throw to the outer context if we fall off
	   the end of the handler.  */
	expand_rethrow (entry->outer_context);

      do_pending_stack_adjust ();
      /* Entries are malloc'd by push_eh_entry; release this one.  */
      free (entry);
    }
}
1269
1270 /* Called at the start of a block of try statements. */
void
expand_start_try_stmts ()
{
  /* Open the exception region for the try block, provided exception
     support is enabled (doing_eh warns if it is not).  */
  if (doing_eh (1))
    expand_eh_region_start ();
}
1279
1280 /* Generate RTL for the start of a group of catch clauses.
1281
1282 It is responsible for starting a new instruction sequence for the
1283 instructions in the catch block, and expanding the handlers for the
1284 internally-generated exception regions nested within the try block
1285 corresponding to this catch block. */
1286
void
expand_start_all_catch ()
{
  struct eh_entry *entry;
  tree label;
  rtx outer_context;

  if (! doing_eh (1))
    return;

  /* Remember where the enclosing region rethrows to, before the try
     block's entry is popped below.  */
  outer_context = ehstack.top->entry->outer_context;

  /* End the try block.  */
  expand_eh_region_end (integer_zero_node);

  emit_line_note (input_filename, lineno);
  label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

  /* The label for the exception handling block that we will save.
     This is Lresume in the documentation.  */
  expand_label (label);

  /* Push the label that points to where normal flow is resumed onto
     the top of the label stack.  */
  push_label_entry (&caught_return_label_stack, NULL_RTX, label);

  /* Start a new sequence for all the catch blocks.  We will add this
     to the global sequence catch_clauses when we have completed all
     the handlers in this handler-seq.  */
  start_sequence ();

  /* Emit the handlers for every queued region nested inside the try
     block, down to and including the try block's own entry.  */
  while (1)
    {
      rtx prev;

      entry = dequeue_eh_entry (&ehqueue);
      /* Emit the label for the exception handler for this region, and
	 expand the code for the handler.

	 Note that a catch region is handled as a side-effect here;
	 for a try block, entry->finalization will contain
	 integer_zero_node, so no code will be generated in the
	 expand_expr call below.  But, the label for the handler will
	 still be emitted, so any code emitted after this point will
	 end up being the handler.  */
      emit_label (entry->exception_handler_label);

#ifdef HAVE_exception_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_exception_receiver)
	  emit_insn (gen_exception_receiver ());
#endif

#ifdef HAVE_nonlocal_goto_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
#endif

      /* When we get down to the matching entry for this try block, stop.  */
      if (entry->finalization == integer_zero_node)
	{
	  /* Don't forget to free this entry.  */
	  free (entry);
	  break;
	}

      /* And now generate the insns for the handler.  */
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	/* Code to throw out to outer context when we fall off end
	   of the handler.  We can't do this here for catch blocks,
	   so it's done in expand_end_all_catch instead.  */
	expand_rethrow (entry->outer_context);

      do_pending_stack_adjust ();
      free (entry);
    }

  /* If we are not doing setjmp/longjmp EH, because we are reordered
     out of line, we arrange to rethrow in the outer context.  We need to
     do this because we are not physically within the region, if any, that
     logically contains this catch block.  */
  if (! exceptions_via_longjmp)
    {
      expand_eh_region_start ();
      ehstack.top->entry->outer_context = outer_context;
    }
}
1378
1379 /* Finish up the catch block. At this point all the insns for the
1380 catch clauses have already been generated, so we only have to add
1381 them to the catch_clauses list. We also want to make sure that if
1382 we fall off the end of the catch clauses that we rethrow to the
1383 outer EH region. */
1384
void
expand_end_all_catch ()
{
  rtx new_catch_clause, outer_context = NULL_RTX;

  if (! doing_eh (1))
    return;

  if (! exceptions_via_longjmp)
    {
      /* Close the rethrow region opened at the end of
	 expand_start_all_catch.  */
      outer_context = ehstack.top->entry->outer_context;

      /* Finish the rethrow region.  size_zero_node is just a NOP.  */
      expand_eh_region_end (size_zero_node);
    }

  /* Code to throw out to outer context, if we fall off end of catch
     handlers.  This is rethrow (Lresume, same id, same obj) in the
     documentation.  We use Lresume because we know that it will throw
     to the correct context.

     In other words, if the catch handler doesn't exit or return, we
     do a "throw" (using the address of Lresume as the point being
     thrown from) so that the outer EH region can then try to process
     the exception.  */
  expand_rethrow (outer_context);

  /* Now we have the complete catch sequence.  */
  new_catch_clause = get_insns ();
  end_sequence ();

  /* This level of catch blocks is done, so set up the successful
     catch jump label for the next layer of catch blocks.  */
  pop_label_entry (&caught_return_label_stack);

  /* Add the new sequence of catches to the main one for this function.  */
  push_to_sequence (catch_clauses);
  emit_insns (new_catch_clause);
  catch_clauses = get_insns ();
  end_sequence ();

  /* Here we fall through into the continuation code.  */
}
1428
1429 /* Rethrow from the outer context LABEL. */
1430
1431 static void
1432 expand_rethrow (label)
1433 rtx label;
1434 {
1435 if (exceptions_via_longjmp)
1436 emit_throw ();
1437 else
1438 emit_jump (label);
1439 }
1440
1441 /* End all the pending exception regions on protect_list. The handlers
1442 will be emitted when expand_leftover_cleanups is invoked. */
1443
1444 void
1445 end_protect_partials ()
1446 {
1447 while (protect_list)
1448 {
1449 expand_eh_region_end (TREE_VALUE (protect_list));
1450 protect_list = TREE_CHAIN (protect_list);
1451 }
1452 }
1453
1454 /* Arrange for __terminate to be called if there is an unhandled throw
1455 from within E. */
1456
tree
protect_with_terminate (e)
     tree e;
{
  /* We only need to do this when using setjmp/longjmp EH and the
     language requires it, as otherwise we protect all of the handlers
     at once, if we need to.  */
  if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
    {
      tree handler, result;

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();

      /* Build an RTL_EXPR handler whose body is simply a call to
	 __terminate, followed by a barrier since it never returns.  */
      handler = make_node (RTL_EXPR);
      TREE_TYPE (handler) = void_type_node;
      RTL_EXPR_RTL (handler) = const0_rtx;
      TREE_SIDE_EFFECTS (handler) = 1;
      start_sequence_for_rtl_expr (handler);

      emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
      emit_barrier ();

      RTL_EXPR_SEQUENCE (handler) = get_insns ();
      end_sequence ();

      /* Wrap E so that a throw during its evaluation runs HANDLER,
	 copying E's expression flags onto the wrapper.  */
      result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
      TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
      TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
      TREE_READONLY (result) = TREE_READONLY (e);

      pop_obstacks ();

      e = result;
    }

  return e;
}
1496 \f
1497 /* The exception table that we build that is used for looking up and
1498 dispatching exceptions, the current number of entries, and its
1499 maximum size before we have to extend it.
1500
1501 The number in eh_table is the code label number of the exception
1502 handler for the region. This is added by add_eh_table_entry and
1503 used by output_exception_table_entry. */
1504
1505 static int *eh_table;
1506 static int eh_table_size;
1507 static int eh_table_max_size;
1508
1509 /* Note the need for an exception table entry for region N. If we
1510 don't need to output an explicit exception table, avoid all of the
1511 extra work.
1512
1513 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1514 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1515 label number of the exception handler for the region. */
1516
1517 void
1518 add_eh_table_entry (n)
1519 int n;
1520 {
1521 #ifndef OMIT_EH_TABLE
1522 if (eh_table_size >= eh_table_max_size)
1523 {
1524 if (eh_table)
1525 {
1526 eh_table_max_size += eh_table_max_size>>1;
1527
1528 if (eh_table_max_size < 0)
1529 abort ();
1530
1531 eh_table = (int *) xrealloc (eh_table,
1532 eh_table_max_size * sizeof (int));
1533 }
1534 else
1535 {
1536 eh_table_max_size = 252;
1537 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1538 }
1539 }
1540 eh_table[eh_table_size++] = n;
1541 #endif
1542 }
1543
1544 /* Return a non-zero value if we need to output an exception table.
1545
1546 On some platforms, we don't have to output a table explicitly.
   A zero return from this routine does not imply that no table exists. */
1548
1549 int
1550 exception_table_p ()
1551 {
1552 if (eh_table)
1553 return 1;
1554
1555 return 0;
1556 }
1557
1558 /* 1 if we need a static constructor to register EH table info. */
1559
int
register_exception_table_p ()
{
#if defined (DWARF2_UNWIND_INFO)
  /* Dwarf 2 unwind info needs no registration constructor.  */
  return 0;
#else
  return exception_table_p ();
#endif
}
1569
/* Output the entry of the exception table corresponding to the
1571 exception region numbered N to file FILE.
1572
1573 N is the code label number corresponding to the handler of the
1574 region. */
1575
static void
output_exception_table_entry (file, n)
     FILE *file;
     int n;
{
  char buf[256];
  rtx sym;

  /* Each entry is three pointers: region begin label, region end
     label, and the handler label, in that order.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);

  ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);

  /* N is the code label number of the handler itself.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);

  putc ('\n', file);		/* blank line */
}
1598
1599 /* Output the exception table if we have and need one. */
1600
void
output_exception_table ()
{
  int i;
  extern FILE *asm_out_file;

  if (! doing_eh (0) || ! eh_table)
    return;

  exception_section ();

  /* Beginning marker for table.  */
  assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
  assemble_label ("__EXCEPTION_TABLE__");

  for (i = 0; i < eh_table_size; ++i)
    output_exception_table_entry (asm_out_file, eh_table[i]);

  /* NOTE(review): eh_table is freed but not cleared, so eh_table and
     eh_table_size are stale after this point; looks like nothing is
     expected to consult them again — confirm against callers.  */
  free (eh_table);

  /* Ending marker for table: three -1 pointers, matching the
     three-pointer entry layout.  */
  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
  putc ('\n', asm_out_file);		/* blank line */
}
1627
1628 /* Generate code to initialize the exception table at program startup
1629 time. */
1630
1631 void
1632 register_exception_table ()
1633 {
1634 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1635 VOIDmode, 1,
1636 gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1637 Pmode);
1638 }
1639 \f
1640 /* Emit the RTL for the start of the per-function unwinder for the
1641 current function. See emit_unwinder for further information.
1642
1643 DOESNT_NEED_UNWINDER is a target-specific macro that determines if
1644 the current function actually needs a per-function unwinder or not.
1645 By default, all functions need one. */
1646
void
start_eh_unwinder ()
{
#ifdef DOESNT_NEED_UNWINDER
  if (DOESNT_NEED_UNWINDER)
    return;
#endif

  /* If we are using the setjmp/longjmp implementation, we don't need a
     per function unwinder.  */

  if (exceptions_via_longjmp)
    return;

#ifdef DWARF2_UNWIND_INFO
  /* Dwarf 2 unwind info makes the per-function unwinder unnecessary.  */
  return;
#endif

  /* Open the region the unwinder protects; end_eh_unwinder closes it.  */
  expand_eh_region_start ();
}
1667
1668 /* Emit insns for the end of the per-function unwinder for the
1669 current function. */
1670
1671 void
1672 end_eh_unwinder ()
1673 {
1674 tree expr;
1675 rtx return_val_rtx, ret_val, label, end, insns;
1676
1677 if (! doing_eh (0))
1678 return;
1679
1680 #ifdef DOESNT_NEED_UNWINDER
1681 if (DOESNT_NEED_UNWINDER)
1682 return;
1683 #endif
1684
1685 /* If we are using the setjmp/longjmp implementation, we don't need a
1686 per function unwinder. */
1687
1688 if (exceptions_via_longjmp)
1689 return;
1690
1691 #ifdef DWARF2_UNWIND_INFO
1692 return;
1693 #else /* DWARF2_UNWIND_INFO */
1694
1695 assemble_external (eh_saved_pc);
1696
1697 expr = make_node (RTL_EXPR);
1698 TREE_TYPE (expr) = void_type_node;
1699 RTL_EXPR_RTL (expr) = const0_rtx;
1700 TREE_SIDE_EFFECTS (expr) = 1;
1701 start_sequence_for_rtl_expr (expr);
1702
1703 /* ret_val will contain the address of the code where the call
1704 to the current function occurred. */
1705 ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
1706 0, hard_frame_pointer_rtx);
1707 return_val_rtx = copy_to_reg (ret_val);
1708
1709 /* Get the address we need to use to determine what exception
1710 handler should be invoked, and store it in __eh_pc. */
1711 return_val_rtx = eh_outer_context (return_val_rtx);
1712 return_val_rtx = expand_binop (Pmode, sub_optab, return_val_rtx, GEN_INT (1),
1713 NULL_RTX, 0, OPTAB_LIB_WIDEN);
1714 emit_move_insn (eh_saved_pc_rtx, return_val_rtx);
1715
1716 /* Either set things up so we do a return directly to __throw, or
1717 we return here instead. */
1718 #ifdef JUMP_TO_THROW
1719 emit_move_insn (ret_val, throw_libfunc);
1720 #else
1721 label = gen_label_rtx ();
1722 emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
1723 #endif
1724
1725 #ifdef RETURN_ADDR_OFFSET
1726 return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
1727 if (return_val_rtx != ret_val)
1728 emit_move_insn (ret_val, return_val_rtx);
1729 #endif
1730
1731 end = gen_label_rtx ();
1732 emit_jump (end);
1733
1734 RTL_EXPR_SEQUENCE (expr) = get_insns ();
1735 end_sequence ();
1736
1737 expand_eh_region_end (expr);
1738
1739 emit_jump (end);
1740
1741 #ifndef JUMP_TO_THROW
1742 emit_label (label);
1743 emit_throw ();
1744 #endif
1745
1746 expand_leftover_cleanups ();
1747
1748 emit_label (end);
1749
1750 #ifdef HAVE_return
1751 if (HAVE_return)
1752 {
1753 emit_jump_insn (gen_return ());
1754 emit_barrier ();
1755 }
1756 #endif
1757 #endif /* DWARF2_UNWIND_INFO */
1758 }
1759
1760 /* If necessary, emit insns for the per function unwinder for the
1761 current function. Called after all the code that needs unwind
1762 protection is output.
1763
1764 The unwinder takes care of catching any exceptions that have not
1765 been previously caught within the function, unwinding the stack to
1766 the next frame, and rethrowing using the address of the current
1767 function's caller as the context of the throw.
1768
1769 On some platforms __throw can do this by itself (or with the help
1770 of __unwind_function) so the per-function unwinder is
1771 unnecessary.
1772
1773 We cannot place the unwinder into the function until after we know
1774 we are done inlining, as we don't want to have more than one
1775 unwinder per non-inlined function. */
1776
void
emit_unwinder ()
{
  rtx insns, insn;

  start_sequence ();
  start_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* We place the start of the exception region associated with the
     per function unwinder at the top of the function.  */
  if (insns)
    emit_insns_after (insns, get_insns ());

  start_sequence ();
  end_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* And we place the end of the exception region before the USE and
     CLOBBER insns that may come at the end of the function.  */
  if (insns == 0)
    return;

  /* Walk back past trailing notes and USE/CLOBBER insns.  */
  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* If control already cannot fall through here, insert the unwinder
     just before; otherwise add a jump around it so normal flow skips
     the unwinder code.  */
  if (GET_CODE (insn) == CODE_LABEL
      && GET_CODE (PREV_INSN (insn)) == BARRIER)
    {
      insn = PREV_INSN (insn);
    }
  else
    {
      rtx label = gen_label_rtx ();
      emit_label_after (label, insn);
      insn = emit_jump_insn_after (gen_jump (label), insn);
      insn = emit_barrier_after (insn);
    }
    
  emit_insns_after (insns, insn);
}
1824
1825 /* Scan the current insns and build a list of handler labels. The
1826 resulting list is placed in the global variable exception_handler_labels.
1827
1828 It is called after the last exception handling region is added to
1829 the current function (when the rtl is almost all built for the
1830 current function) and before the jump optimization pass. */
1831
1832 void
1833 find_exception_handler_labels ()
1834 {
1835 rtx insn;
1836 int max_labelno = max_label_num ();
1837 int min_labelno = get_first_label_num ();
1838 rtx *labels;
1839
1840 exception_handler_labels = NULL_RTX;
1841
1842 /* If we aren't doing exception handling, there isn't much to check. */
1843 if (! doing_eh (0))
1844 return;
1845
1846 /* Generate a handy reference to each label. */
1847
1848 /* We call xmalloc here instead of alloca; we did the latter in the past,
1849 but found that it can sometimes end up being asked to allocate space
1850 for more than 1 million labels. */
1851 labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
1852 bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
1853
1854 /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER. */
1855 labels -= min_labelno;
1856
1857 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1858 {
1859 if (GET_CODE (insn) == CODE_LABEL)
1860 if (CODE_LABEL_NUMBER (insn) >= min_labelno
1861 && CODE_LABEL_NUMBER (insn) < max_labelno)
1862 labels[CODE_LABEL_NUMBER (insn)] = insn;
1863 }
1864
1865 /* For each start of a region, add its label to the list. */
1866
1867 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1868 {
1869 if (GET_CODE (insn) == NOTE
1870 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1871 {
1872 rtx label = NULL_RTX;
1873
1874 if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
1875 && NOTE_BLOCK_NUMBER (insn) < max_labelno)
1876 {
1877 label = labels[NOTE_BLOCK_NUMBER (insn)];
1878
1879 if (label)
1880 exception_handler_labels
1881 = gen_rtx (EXPR_LIST, VOIDmode,
1882 label, exception_handler_labels);
1883 else
1884 warning ("didn't find handler for EH region %d",
1885 NOTE_BLOCK_NUMBER (insn));
1886 }
1887 else
1888 warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
1889 }
1890 }
1891
1892 free (labels + min_labelno);
1893 }
1894
1895 /* Perform sanity checking on the exception_handler_labels list.
1896
1897 Can be called after find_exception_handler_labels is called to
1898 build the list of exception handlers for the current function and
1899 before we finish processing the current function. */
1900
1901 void
1902 check_exception_handler_labels ()
1903 {
1904 rtx insn, handler;
1905
1906 /* If we aren't doing exception handling, there isn't much to check. */
1907 if (! doing_eh (0))
1908 return;
1909
1910 /* Ensure that the CODE_LABEL_NUMBER for the CODE_LABEL entry point
1911 in each handler corresponds to the CODE_LABEL_NUMBER of the
1912 handler. */
1913
1914 for (handler = exception_handler_labels;
1915 handler;
1916 handler = XEXP (handler, 1))
1917 {
1918 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1919 {
1920 if (GET_CODE (insn) == CODE_LABEL)
1921 {
1922 if (CODE_LABEL_NUMBER (insn)
1923 == CODE_LABEL_NUMBER (XEXP (handler, 0)))
1924 {
1925 if (insn != XEXP (handler, 0))
1926 warning ("mismatched handler %d",
1927 CODE_LABEL_NUMBER (insn));
1928 break;
1929 }
1930 }
1931 }
1932 if (insn == NULL_RTX)
1933 warning ("handler not found %d",
1934 CODE_LABEL_NUMBER (XEXP (handler, 0)));
1935 }
1936
1937 /* Now go through and make sure that for each region there is a
1938 corresponding label. */
1939 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1940 {
1941 if (GET_CODE (insn) == NOTE
1942 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1943 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1944 {
1945 for (handler = exception_handler_labels;
1946 handler;
1947 handler = XEXP (handler, 1))
1948 {
1949 if (CODE_LABEL_NUMBER (XEXP (handler, 0))
1950 == NOTE_BLOCK_NUMBER (insn))
1951 break;
1952 }
1953 if (handler == NULL_RTX)
1954 warning ("region exists, no handler %d",
1955 NOTE_BLOCK_NUMBER (insn));
1956 }
1957 }
1958 }
1959 \f
1960 /* This group of functions initializes the exception handling data
1961 structures at the start of the compilation, initializes the data
1962 structures at the start of a function, and saves and restores the
1963 exception handling data structures for the start/end of a nested
1964 function. */
1965
1966 /* Toplevel initialization for EH things. */
1967
1968 void
1969 init_eh ()
1970 {
1971 /* Generate rtl to reference the variable in which the PC of the
1972 current context is saved. */
1973 tree type = build_pointer_type (make_node (VOID_TYPE));
1974
1975 #ifndef DWARF2_UNWIND_INFO
1976 eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
1977 DECL_EXTERNAL (eh_saved_pc) = 1;
1978 TREE_PUBLIC (eh_saved_pc) = 1;
1979 make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
1980 eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
1981 #endif
1982 }
1983
1984 /* Initialize the per-function EH information. */
1985
1986 void
1987 init_eh_for_function ()
1988 {
1989 ehstack.top = 0;
1990 ehqueue.head = ehqueue.tail = 0;
1991 catch_clauses = NULL_RTX;
1992 false_label_stack = 0;
1993 caught_return_label_stack = 0;
1994 protect_list = NULL_TREE;
1995 current_function_dhc = NULL_RTX;
1996 current_function_dcc = NULL_RTX;
1997 }
1998
1999 /* Save some of the per-function EH info into the save area denoted by
2000 P.
2001
2002 This is currently called from save_stmt_status. */
2003
2004 void
2005 save_eh_status (p)
2006 struct function *p;
2007 {
2008 if (p == NULL)
2009 abort ();
2010
2011 p->ehstack = ehstack;
2012 p->ehqueue = ehqueue;
2013 p->catch_clauses = catch_clauses;
2014 p->false_label_stack = false_label_stack;
2015 p->caught_return_label_stack = caught_return_label_stack;
2016 p->protect_list = protect_list;
2017 p->dhc = current_function_dhc;
2018 p->dcc = current_function_dcc;
2019
2020 init_eh ();
2021 }
2022
2023 /* Restore the per-function EH info saved into the area denoted by P.
2024
2025 This is currently called from restore_stmt_status. */
2026
2027 void
2028 restore_eh_status (p)
2029 struct function *p;
2030 {
2031 if (p == NULL)
2032 abort ();
2033
2034 protect_list = p->protect_list;
2035 caught_return_label_stack = p->caught_return_label_stack;
2036 false_label_stack = p->false_label_stack;
2037 catch_clauses = p->catch_clauses;
2038 ehqueue = p->ehqueue;
2039 ehstack = p->ehstack;
2040 current_function_dhc = p->dhc;
2041 current_function_dcc = p->dcc;
2042 }
2043 \f
2044 /* This section is for the exception handling specific optimization
2045 pass. First are the internal routines, and then the main
2046 optimization pass. */
2047
2048 /* Determine if the given INSN can throw an exception. */
2049
2050 static int
2051 can_throw (insn)
2052 rtx insn;
2053 {
2054 /* Calls can always potentially throw exceptions. */
2055 if (GET_CODE (insn) == CALL_INSN)
2056 return 1;
2057
2058 if (asynchronous_exceptions)
2059 {
2060 /* If we wanted asynchronous exceptions, then everything but NOTEs
2061 and CODE_LABELs could throw. */
2062 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2063 return 1;
2064 }
2065
2066 return 0;
2067 }
2068
2069 /* Scan a exception region looking for the matching end and then
2070 remove it if possible. INSN is the start of the region, N is the
2071 region number, and DELETE_OUTER is to note if anything in this
2072 region can throw.
2073
2074 Regions are removed if they cannot possibly catch an exception.
2075 This is determined by invoking can_throw on each insn within the
2076 region; if can_throw returns true for any of the instructions, the
2077 region can catch an exception, since there is an insn within the
2078 region that is capable of throwing an exception.
2079
2080 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2081 calls abort if it can't find one.
2082
2083 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEGIN, or if N doesn't
2084 correspond to the region number, or if DELETE_OUTER is NULL. */
2085
static rtx
scan_region (insn, n, delete_outer)
     rtx insn;
     int n;
     int *delete_outer;
{
  /* Remember the _BEG note so we can delete it if the region is empty.  */
  rtx start = insn;

  /* Assume we can delete the region; cleared as soon as any insn in
     the region (or a nested region) can throw.  */
  int delete = 1;

  /* Sanity checks: we must be positioned on the NOTE_INSN_EH_REGION_BEG
     for region N, and the caller must supply somewhere to record
     whether this region can throw.  */
  if (insn == NULL_RTX
      || GET_CODE (insn) != NOTE
      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
      || NOTE_BLOCK_NUMBER (insn) != n
      || delete_outer == NULL)
    abort ();

  insn = NEXT_INSN (insn);

  /* Look for the matching end.  */
  while (! (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
    {
      /* If anything can throw, we can't remove the region.  */
      if (delete && can_throw (insn))
	{
	  delete = 0;
	}

      /* Watch out for and handle nested regions.  The recursive call
	 returns the nested _END note, so advancing past it skips the
	 nested region's body entirely.  If the nested region can
	 throw, it clears DELETE through the pointer we pass.  */
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	{
	  insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
	}

      insn = NEXT_INSN (insn);
    }

  /* The _BEG/_END NOTEs must match and nest.  */
  if (NOTE_BLOCK_NUMBER (insn) != n)
    abort ();

  /* If anything in this exception region can throw, we can throw.  */
  if (! delete)
    *delete_outer = 0;
  else
    {
      /* Delete the start and end of the region.  */
      delete_insn (start);
      delete_insn (insn);

      /* Only do this part if we have built the exception handler
	 labels.  */
      if (exception_handler_labels)
	{
	  rtx x, *prev = &exception_handler_labels;

	  /* Find it in the list of handlers.  Handler labels are
	     matched against the region number N here — assumes the
	     handler's CODE_LABEL_NUMBER equals its region number;
	     TODO(review): confirm against the region-numbering code.  */
	  for (x = exception_handler_labels; x; x = XEXP (x, 1))
	    {
	      rtx label = XEXP (x, 0);
	      if (CODE_LABEL_NUMBER (label) == n)
		{
		  /* If we are the last reference to the handler,
		     delete it.  */
		  if (--LABEL_NUSES (label) == 0)
		    delete_insn (label);

		  if (optimize)
		    {
		      /* Remove it from the list of exception handler
			 labels, if we are optimizing.  If we are not, then
			 leave it in the list, as we are not really going to
			 remove the region.  PREV points at the link that
			 holds X, so this unlinks X in place.  */
		      *prev = XEXP (x, 1);
		      XEXP (x, 1) = 0;
		      XEXP (x, 0) = 0;
		    }

		  break;
		}
	      prev = &XEXP (x, 1);
	    }
	}
    }
  /* Return the _END note so the caller can continue scanning after it.  */
  return insn;
}
2175
2176 /* Perform various interesting optimizations for exception handling
2177 code.
2178
2179 We look for empty exception regions and make them go (away). The
2180 jump optimization code will remove the handler if nothing else uses
2181 it. */
2182
2183 void
2184 exception_optimize ()
2185 {
2186 rtx insn, regions = NULL_RTX;
2187 int n;
2188
2189 /* Remove empty regions. */
2190 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2191 {
2192 if (GET_CODE (insn) == NOTE
2193 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2194 {
2195 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2196 insn, we will indirectly skip through all the insns
2197 inbetween. We are also guaranteed that the value of insn
2198 returned will be valid, as otherwise scan_region won't
2199 return. */
2200 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2201 }
2202 }
2203 }
2204 \f
2205 /* Various hooks for the DWARF 2 __throw routine. */
2206
2207 /* Do any necessary initialization to access arbitrary stack frames.
2208 On the SPARC, this means flushing the register windows. */
2209
void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

  /* Let the target do whatever is needed before arbitrary stack
     frames can be examined (on the SPARC, flush the register
     windows).  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2221
2222 /* Given a value extracted from the return address register or stack slot,
2223 return the actual address encoded in that value. */
2224
2225 rtx
2226 expand_builtin_extract_return_addr (addr_tree)
2227 tree addr_tree;
2228 {
2229 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2230 return eh_outer_context (addr);
2231 }
2232
2233 /* Given an actual address in addr_tree, do any necessary encoding
2234 and return the value to be stored in the return address register or
2235 stack slot so the epilogue will return to that address. */
2236
2237 rtx
2238 expand_builtin_frob_return_addr (addr_tree)
2239 tree addr_tree;
2240 {
2241 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2242 #ifdef RETURN_ADDR_OFFSET
2243 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2244 #endif
2245 return addr;
2246 }
2247
2248 /* Given an actual address in addr_tree, set the return address register up
2249 so the epilogue will return to that address. If the return address is
2250 not in a register, do nothing. */
2251
2252 void
2253 expand_builtin_set_return_addr_reg (addr_tree)
2254 tree addr_tree;
2255 {
2256 rtx tmp;
2257 rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2258 0, hard_frame_pointer_rtx);
2259
2260 if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2261 return;
2262
2263 tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2264 if (tmp != ra)
2265 emit_move_insn (ra, tmp);
2266 }
2267
2268 /* Choose two registers for communication between the main body of
2269 __throw and the stub for adjusting the stack pointer. The first register
2270 is used to pass the address of the exception handler; the second register
2271 is used to pass the stack pointer offset.
2272
2273 For register 1 we use the return value register for a void *.
2274 For register 2 we use the static chain register if it exists and is
2275 different from register 1, otherwise some arbitrary call-clobbered
2276 register. */
2277
static void
eh_regs (r1, r2, outgoing)
     rtx *r1, *r2;
     int outgoing;
{
  rtx reg1, reg2;

  /* Register 1: the register holding a `void *' function value.
     OUTGOING selects the caller's view of the value register on
     targets where it differs from the callee's.  */
#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
				    current_function_decl);
  else
#endif
    reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
			   current_function_decl);

  /* Register 2: prefer the static chain register.  NOTE the
     preprocessor trick here: when STATIC_CHAIN_REGNUM is defined, the
     `reg2 = NULL_RTX' after the #endif is the body of the IF, so REG2
     is only cleared when the static chain collides with REG1.  When it
     is not defined, the assignment runs unconditionally.  */
#ifdef STATIC_CHAIN_REGNUM
  if (outgoing)
    reg2 = static_chain_incoming_rtx;
  else
    reg2 = static_chain_rtx;
  if (REGNO (reg2) == REGNO (reg1))
#endif /* STATIC_CHAIN_REGNUM */
    reg2 = NULL_RTX;

  /* No usable static chain register: fall back to the first
     call-clobbered, non-fixed register distinct from REG1.  */
  if (reg2 == NULL_RTX)
    {
      int i;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
	if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
	  {
	    reg2 = gen_rtx (REG, Pmode, i);
	    break;
	  }

      /* A target with no such register cannot support this scheme.  */
      if (reg2 == NULL_RTX)
	abort ();
    }

  *r1 = reg1;
  *r2 = reg2;
}
2320
2321 /* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2322 to the exception handler. __throw will set up the necessary values
2323 and then return to the stub. */
2324
2325 rtx
2326 expand_builtin_eh_stub ()
2327 {
2328 rtx stub_start = gen_label_rtx ();
2329 rtx after_stub = gen_label_rtx ();
2330 rtx handler, offset, temp;
2331
2332 emit_jump (after_stub);
2333 emit_label (stub_start);
2334
2335 eh_regs (&handler, &offset, 0);
2336
2337 adjust_stack (offset);
2338 emit_indirect_jump (handler);
2339
2340 emit_label (after_stub);
2341 return gen_rtx (LABEL_REF, Pmode, stub_start);
2342 }
2343
2344 /* Set up the registers for passing the handler address and stack offset
2345 to the stub above. */
2346
2347 void
2348 expand_builtin_set_eh_regs (handler, offset)
2349 tree handler, offset;
2350 {
2351 rtx reg1, reg2;
2352
2353 eh_regs (&reg1, &reg2, 1);
2354
2355 store_expr (offset, reg2, 0);
2356 store_expr (handler, reg1, 0);
2357
2358 /* These will be used by the stub. */
2359 emit_insn (gen_rtx (USE, VOIDmode, reg1));
2360 emit_insn (gen_rtx (USE, VOIDmode, reg2));
2361 }
This page took 0.14484 seconds and 6 git commands to generate.