1 /* Implements exception handling.
2 Copyright (C) 1989, 92-96, 1997 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
26 be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurs without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46 There are two major codegen options for exception handling: a PC
47 range table approach and a setjmp/longjmp based scheme. The flag
48 -fsjlj-exceptions can be used to select the setjmp/longjmp
49 approach, which is the default; -fno-sjlj-exceptions can be used
50 to get the PC range table approach. While this is a compile time
51 flag, an entire application must be compiled with the same
52 codegen option. We will first discuss the PC range table
53 approach; after that, we will discuss the setjmp/longjmp based
54 approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61 Regions of code within a function can be marked such that if one
62 of them contains the context of a throw, control will be passed to
63 a designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
74 In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
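
   Conceptually, each table entry is a triple giving the starting
   address of the region, the ending address, and the handler, along
   these lines (a sketch only; the field names here are illustrative,
   and the real definition lives in libgcc2.c):

	struct exception_table_entry
	{
	  void *start;
	  void *end;
	  void *handler;
	};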
89
90 If the target does not use the DWARF 2 frame unwind information, at
91 program startup each object file invokes a function named
92 __register_exceptions with the address of its local
93 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94 is responsible for recording all of the exception regions into one list
95 (which is kept in a static variable named exception_table_list).
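
   Roughly speaking, the effect is as if each object file's startup
   code made the call below (a sketch; see register_exception_table
   later in this file for the code that actually emits it):

	__register_exceptions (&__EXCEPTION_TABLE__);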
96
97 On targets that support crtstuff.c, the unwind information
98 is stored in a section named .eh_frame and the information for the
99 entire shared object or program is registered with a call to
100 __register_frame. On other targets, the information for each
101 translation unit is registered from the file generated by collect2.
102 __register_frame is defined in frame.c, and is responsible for
103 recording all of the unwind regions into one list (which is kept in a
104 static variable named unwind_table_list).
105
106 The function __throw is actually responsible for doing the
107 throw. On machines that have unwind info support, __throw is generated
108 by code in libgcc2.c; otherwise __throw is generated on a
109 per-object-file basis for each source file compiled with
110 -fexceptions by the C++ frontend. Before __throw is invoked,
111 the current context of the throw needs to be placed in the global
112 variable __eh_pc.
113
114 __throw attempts to find the appropriate exception handler for the
115 PC value stored in __eh_pc by calling __find_first_exception_table_match
116 (which is defined in libgcc2.c). If __find_first_exception_table_match
117 finds a relevant handler, __throw transfers control directly to it.
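
   In outline, the lookup __throw performs looks like this (a sketch;
   the real code is in libgcc2.c):

	handler = __find_first_exception_table_match (__eh_pc);

   which scans the registered exception regions for the first entry
   whose start/end range contains __eh_pc and returns that entry's
   handler address, or a null pointer if no entry matches.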
118
119 If a handler for the context being thrown from can't be found, __throw
120 walks (see Walking the stack below) the stack up the dynamic call chain to
121 continue searching for an appropriate exception handler based upon the
122 caller of the function it last sought an exception handler for. It
123 stops when either an exception handler is found, or the top of the
124 call chain is reached.
125
126 If no handler is found, an external library function named
127 __terminate is called. If a handler is found, then we restart
128 our search for a handler at the end of the call chain, and repeat
129 the search process, but instead of just walking up the call chain,
130 we unwind the call chain as we walk up it.
131
132 Internal implementation details:
133
134 To associate a user-defined handler with a block of statements, the
135 function expand_start_try_stmts is used to mark the start of the
136 block of statements with which the handler is to be associated
137 (which is known as a "try block"). All statements that appear
138 afterwards will be associated with the try block.
139
140 A call to expand_start_all_catch marks the end of the try block,
141 and also marks the start of the "catch block" (the user-defined
142 handler) associated with the try block.
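
   For a single try block, the front end therefore makes a sequence
   of calls along these lines (a sketch of the expected call order):

	expand_start_try_stmts ();
	  ... expand the statements of the try block ...
	expand_start_all_catch ();
	  ... expand the user-defined handler(s) ...
	expand_end_all_catch ();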
143
144 This user-defined handler will be invoked for *every* exception
145 thrown within the context of the try block. It is up to the handler
146 to decide whether or not it wishes to handle any given exception, as
147 there is currently no mechanism in this implementation for selecting
148 a handler based on the exception itself. (There are plans for
149 conditionally processing an exception based on its "type", which
150 will provide a language-independent mechanism).
151
152 If the handler chooses not to process the exception (perhaps by
153 looking at an "exception type" or some other additional data
154 supplied with the exception), it can fall through to the end of the
155 handler. expand_end_all_catch and expand_leftover_cleanups
156 add additional code to the end of each handler to take care of
157 rethrowing to the outer exception handler.
158
159 The handler also has the option to continue with "normal flow of
160 code", or in other words to resume executing at the statement
161 immediately after the end of the exception region. The variable
162 caught_return_label_stack contains a stack of labels, and jumping
163 to the topmost entry's label via expand_goto will resume normal
164 flow to the statement immediately after the end of the exception
165 region. If the handler falls through to the end, the exception will
166 be rethrown to the outer exception region.
167
168 The instructions for the catch block are kept as a separate
169 sequence, and will be emitted at the end of the function along with
170 the handlers specified via expand_eh_region_end. The end of the
171 catch block is marked with expand_end_all_catch.
172
173 Any data associated with the exception must currently be handled by
174 some external mechanism maintained in the frontend. For example,
175 the C++ exception mechanism passes an arbitrary value along with
176 the exception, and this is handled in the C++ frontend by using a
177 global variable to hold the value. (This will be changing in the
178 future.)
179
180 The mechanism in C++ for handling data associated with the
181 exception is clearly not thread-safe. For a thread-based
182 environment, another mechanism must be used (possibly using a
183 per-thread allocation mechanism if the size of the area that needs
184 to be allocated isn't known at compile time.)
185
186 Internally-generated exception regions (cleanups) are marked by
187 calling expand_eh_region_start to mark the start of the region,
188 and expand_eh_region_end (handler) is used to both designate the
189 end of the region and to associate a specified handler/cleanup with
190 the region. The rtl code in HANDLER will be invoked whenever an
191 exception occurs in the region between the calls to
192 expand_eh_region_start and expand_eh_region_end. After HANDLER is
193 executed, additional code is emitted to handle rethrowing the
194 exception to the outer exception handler. The code for HANDLER will
195 be emitted at the end of the function.
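
   That is, a cleanup region is bracketed by a pairing like this
   (a sketch):

	expand_eh_region_start ();
	  ... code during which the cleanup must run if a throw
	      occurs ...
	expand_eh_region_end (handler);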
196
197 TARGET_EXPRs can also be used to designate exception regions. A
198 TARGET_EXPR gives an unwind-protect style interface commonly used
199 in functional languages such as LISP. The associated expression is
200 evaluated, and whether or not it (or any of the functions that it
201 calls) throws an exception, the protect expression is always
202 invoked. This implementation takes care of the details of
203 associating an exception table entry with the expression and
204 generating the necessary code (it actually emits the protect
205 expression twice, once for normal flow and once for the exception
206 case). As for the other handlers, the code for the exception case
207 will be emitted at the end of the function.
208
209 Cleanups can also be specified by using add_partial_entry (handler)
210 and end_protect_partials. add_partial_entry creates the start of
211 a new exception region; HANDLER will be invoked if an exception is
212 thrown with the context of the region between the calls to
213 add_partial_entry and end_protect_partials. end_protect_partials is
214 used to mark the end of these regions. add_partial_entry can be
215 called as many times as needed before calling end_protect_partials.
216 However, end_protect_partials should only be invoked once for each
217 group of calls to add_partial_entry as the entries are queued
218 and all of the outstanding entries are processed simultaneously
219 when end_protect_partials is invoked. Similarly to the other
220 handlers, the code for HANDLER will be emitted at the end of the
221 function.
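
   A typical use looks like this (a sketch; cleanup_a and cleanup_b
   stand for the handler trees passed in):

	add_partial_entry (cleanup_a);
	  ... code protected by the first region ...
	add_partial_entry (cleanup_b);
	  ... code protected by both regions ...
	end_protect_partials ();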
222
223 The generated RTL for an exception region includes
224 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225 the start and end of the exception region. A unique label is also
226 generated at the start of the exception region, which is available
227 by looking at the ehstack variable. The topmost entry corresponds
228 to the current region.
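
   Schematically, the protected code is therefore bracketed like this
   (a sketch of the insn stream):

	NOTE_INSN_EH_REGION_BEG		block number is the handler's
					CODE_LABEL_NUMBER
	  ... insns of the protected region ...
	NOTE_INSN_EH_REGION_END		same block number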
229
230 In the current implementation, an exception can only be thrown from
231 a function call (since the mechanism used to actually throw an
232 exception involves calling __throw). If an exception region is
233 created but no function calls occur within that region, the region
234 can be safely optimized away (along with its exception handlers)
235 since no exceptions can ever be caught in that region. This
236 optimization is performed unless -fasynchronous-exceptions is
237 given. If the user wishes to throw from a signal handler, or other
238 asynchronous place, -fasynchronous-exceptions should be used when
239 compiling for maximally correct code, at the cost of additional
240 exception regions. Using -fasynchronous-exceptions only produces
241 code that is reasonably safe in such situations, but a correct
242 program cannot rely upon this working. It can be used in failsafe
243 code, where trying to continue on and proceeding with potentially
244 incorrect results is better than halting the program.
245
246
247 Walking the stack:
248
249 The stack is walked by starting with a pointer to the current
250 frame, and finding the pointer to the caller's frame. The unwind info
251 tells __throw how to find it.
252
253 Unwinding the stack:
254
255 When we use the term unwinding the stack, we mean undoing the
256 effects of the function prologue in a controlled fashion so that we
257 still have the flow of control. Otherwise, we could just return
258 (jump to the normal end of function epilogue).
259
260 This is done in __throw in libgcc2.c when we know that a handler exists
261 in a frame higher up the call stack than its immediate caller.
262
263 To unwind, we find the unwind data associated with the frame, if any.
264 If we don't find any, we call the library routine __terminate. If we do
265 find it, we use the information to copy the saved register values from
266 that frame into the register save area in the frame for __throw, return
267 into a stub which updates the stack pointer, and jump to the handler.
268 The normal function epilogue for __throw handles restoring the saved
269 values into registers.
270
271 When unwinding, we use this method if we know it will
272 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
273 an inline unwinder will have been emitted for any function that
274 __unwind_function cannot unwind. For any function that we know
275 cannot be unwound by __unwind_function, the inline unwinder appears
276 as a normal exception handler for the entire function. We inform
277 the compiler of whether a function can be unwound with
278 __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
279 when the unwinder isn't needed. __unwind_function is used as an
280 action of last resort. If no other method can be used for
281 unwinding, __unwind_function is used. If it cannot unwind, it
282 should call __terminate.
283
284 By default, if the target-specific backend doesn't supply a definition
285 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286 unwinders will be used instead. The main tradeoff here is in text space
287 utilization. Obviously, if inline unwinders have to be generated
288 repeatedly, this uses much more space than if a single routine is used.
289
290 However, it is simply not possible on some platforms to write a
291 generalized routine for doing stack unwinding without having some
292 form of additional data associated with each function. The current
293 implementation can encode this data in the form of additional
294 machine instructions or as static data in tabular form. The latter
295 is called the unwind data.
296
297 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299 defined and has a non-zero value, a per-function unwinder is not emitted
300 for the current function. If the static unwind data is supported, then
301 a per-function unwinder is not emitted.
302
303 On some platforms it is possible that neither __unwind_function
304 nor inlined unwinders are available. For these platforms it is not
305 possible to throw through a function call, and abort will be
306 invoked instead of performing the throw.
307
308 The reason the unwind data may be needed is that on some platforms
309 the order and types of data stored on the stack can vary depending
310 on the type of function, its arguments and returned values, and the
311 compilation options used (optimization versus non-optimization,
312 -fomit-frame-pointer, processor variations, etc).
313
314 Unfortunately, this also means that throwing through functions that
315 aren't compiled with exception handling support will still not be
316 possible on some platforms. This problem is currently being
317 investigated, but no solutions have been found that do not imply
318 some unacceptable performance penalties.
319
320 Future directions:
321
322 Currently __throw makes no differentiation between cleanups and
323 user-defined exception regions. While this makes the implementation
324 simple, it also implies that it is impossible to determine if a
325 user-defined exception handler exists for a given exception without
326 completely unwinding the stack in the process. This is undesirable
327 from the standpoint of debugging, as ideally it would be possible
328 to trap unhandled exceptions in the debugger before the process of
329 unwinding has even started.
330
331 This problem can be solved by marking user-defined handlers in a
332 special way (probably by adding additional bits to exception_table_list).
333 A two-pass scheme could then be used by __throw to iterate
334 through the table. The first pass would search for a relevant
335 user-defined handler for the current context of the throw, and if
336 one is found, the second pass would then invoke all needed cleanups
337 before jumping to the user-defined handler.
338
339 Many languages (including C++ and Ada) make execution of a
340 user-defined handler conditional on the "type" of the exception
341 thrown. (The type of the exception is actually the type of the data
342 that is thrown with the exception.) It will thus be necessary for
343 __throw to be able to determine if a given user-defined
344 exception handler will actually be executed, given the type of
345 exception.
346
347 One scheme is to add additional information to exception_table_list
348 as to the types of exceptions accepted by each handler. __throw
349 can do the type comparisons and then determine if the handler is
350 actually going to be executed.
351
352 There is currently no significant level of debugging support
353 available, other than to place a breakpoint on __throw. While
354 this is sufficient in most cases, it would be helpful to be able to
355 know where a given exception was going to be thrown to before it is
356 actually thrown, and to be able to choose between stopping before
357 every exception region (including cleanups), or just user-defined
358 exception regions. This should be possible to do in the two-pass
359 scheme by adding additional labels to __throw for appropriate
360 breakpoints, and additional debugger commands could be added to
361 query various state variables to determine what actions are to be
362 performed next.
363
364 Another major problem that is being worked on is the issue with stack
365 unwinding on various platforms. Currently the only platforms that have
366 support for the generation of a generic unwinder are the SPARC and MIPS.
367 All other ports require per-function unwinders, which produce large
368 amounts of code bloat.
369
370 For setjmp/longjmp based exception handling, some of the details
371 are as above, but there are some additional details. This section
372 discusses the details.
373
374 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
375 optimize EH regions yet. We don't have to worry about machine
376 specific issues with unwinding the stack, as we rely upon longjmp
377 for all the machine specific details. There is no variable context
378 of a throw, just the one implied by the dynamic handler stack
379 pointed to by the dynamic handler chain. There is no exception
380 table, and no calls to __register_exceptions. __sjthrow is used
381 instead of __throw, and it works by using the dynamic handler
382 chain, and longjmp. -fasynchronous-exceptions has no effect, as
383 the elimination of trivial exception regions is not yet performed.
384
385 A frontend can set protect_cleanup_actions_with_terminate when all
386 the cleanup actions should be protected with an EH region that
387 calls terminate when an unhandled exception is thrown. C++ does
388 this, Ada does not. */
389
390
391 #include "config.h"
392 #include "defaults.h"
393 #include <stdio.h>
394 #include "rtl.h"
395 #include "tree.h"
396 #include "flags.h"
397 #include "except.h"
398 #include "function.h"
399 #include "insn-flags.h"
400 #include "expr.h"
401 #include "insn-codes.h"
402 #include "regs.h"
403 #include "hard-reg-set.h"
404 #include "insn-config.h"
405 #include "recog.h"
406 #include "output.h"
407
408 /* One to use setjmp/longjmp method of generating code for exception
409 handling; the initial value of 2 means the choice has not yet been made. */
410
411 int exceptions_via_longjmp = 2;
412
413 /* One to enable asynchronous exception support. */
414
415 int asynchronous_exceptions = 0;
416
417 /* One to protect cleanup actions with a handler that calls
418 __terminate, zero otherwise. */
419
420 int protect_cleanup_actions_with_terminate;
421
422 /* A list of labels used for exception handlers. Created by
423 find_exception_handler_labels for the optimization passes. */
424
425 rtx exception_handler_labels;
426
427 /* Nonzero means that __throw was invoked.
428
429 This is used by the C++ frontend to know if code needs to be emitted
430 for __throw or not. */
431
432 int throw_used;
433
434 /* The dynamic handler chain. Nonzero if the function has already
435 fetched a pointer to the dynamic handler chain for exception
436 handling. */
437
438 rtx current_function_dhc;
439
440 /* The dynamic cleanup chain. Nonzero if the function has already
441 fetched a pointer to the dynamic cleanup chain for exception
442 handling. */
443
444 rtx current_function_dcc;
445
446 /* A stack used for keeping track of the currently active exception
447 handling region. As each exception region is started, an entry
448 describing the region is pushed onto this stack. The current
449 region can be found by looking at the top of the stack, and as we
450 exit regions, the corresponding entries are popped.
451
452 Entries cannot overlap; they can be nested. So there is only one
453 entry at most that corresponds to the current instruction, and that
454 is the entry on the top of the stack. */
455
456 static struct eh_stack ehstack;
457
458 /* A queue used for tracking which exception regions have closed but
459 whose handlers have not yet been expanded. Regions are emitted in
460 groups in an attempt to improve paging performance.
461
462 As we exit a region, we enqueue a new entry. The entries are then
463 dequeued during expand_leftover_cleanups and expand_start_all_catch.
464
465 We should redo things so that we either take RTL for the handler,
466 or we expand the handler expressed as a tree immediately at region
467 end time. */
468
469 static struct eh_queue ehqueue;
470
471 /* Insns for all of the exception handlers for the current function.
472 They are currently emitted by the frontend code. */
473
474 rtx catch_clauses;
475
476 /* A TREE_CHAINed list of handlers for regions that are not yet
477 closed. The TREE_VALUE of each entry contains the handler for the
478 corresponding entry on the ehstack. */
479
480 static tree protect_list;
481
482 /* Stacks to keep track of various labels. */
483
484 /* Keeps track of the label to resume to should one want to resume
485 normal control flow out of a handler (instead of, say, returning to
486 the caller of the current function or exiting the program). */
487
488 struct label_node *caught_return_label_stack = NULL;
489
490 /* A random data area for the front end's own use. */
491
492 struct label_node *false_label_stack = NULL;
493
494 #ifndef DWARF2_UNWIND_INFO
495 /* The rtx and the tree for the saved PC value. */
496
497 rtx eh_saved_pc_rtx;
498 tree eh_saved_pc;
499 #endif
500
501 rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
502 static void expand_rethrow PROTO((rtx));
503
504 \f
505 /* Various support routines to manipulate the various data structures
506 used by the exception handling code. */
507
508 /* Push a label entry onto the given STACK. */
509
510 void
511 push_label_entry (stack, rlabel, tlabel)
512 struct label_node **stack;
513 rtx rlabel;
514 tree tlabel;
515 {
516 struct label_node *newnode
517 = (struct label_node *) xmalloc (sizeof (struct label_node));
518
519 if (rlabel)
520 newnode->u.rlabel = rlabel;
521 else
522 newnode->u.tlabel = tlabel;
523 newnode->chain = *stack;
524 *stack = newnode;
525 }
526
527 /* Pop a label entry from the given STACK. */
528
529 rtx
530 pop_label_entry (stack)
531 struct label_node **stack;
532 {
533 rtx label;
534 struct label_node *tempnode;
535
536 if (! *stack)
537 return NULL_RTX;
538
539 tempnode = *stack;
540 label = tempnode->u.rlabel;
541 *stack = (*stack)->chain;
542 free (tempnode);
543
544 return label;
545 }
546
547 /* Return the top element of the given STACK. */
548
549 tree
550 top_label_entry (stack)
551 struct label_node **stack;
552 {
553 if (! *stack)
554 return NULL_TREE;
555
556 return (*stack)->u.tlabel;
557 }
558
559 /* Make a copy of ENTRY using xmalloc to allocate the space. */
560
561 static struct eh_entry *
562 copy_eh_entry (entry)
563 struct eh_entry *entry;
564 {
565 struct eh_entry *newentry;
566
567 newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
568 bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
569
570 return newentry;
571 }
572
573 /* Push a new eh_node entry onto STACK. */
574
575 static void
576 push_eh_entry (stack)
577 struct eh_stack *stack;
578 {
579 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
580 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
581
582 entry->outer_context = gen_label_rtx ();
583 entry->exception_handler_label = gen_label_rtx ();
584 entry->finalization = NULL_TREE;
585
586 node->entry = entry;
587 node->chain = stack->top;
588 stack->top = node;
589 }
590
591 /* Pop an entry from the given STACK. */
592
593 static struct eh_entry *
594 pop_eh_entry (stack)
595 struct eh_stack *stack;
596 {
597 struct eh_node *tempnode;
598 struct eh_entry *tempentry;
599
600 tempnode = stack->top;
601 tempentry = tempnode->entry;
602 stack->top = stack->top->chain;
603 free (tempnode);
604
605 return tempentry;
606 }
607
608 /* Enqueue an ENTRY onto the given QUEUE. */
609
610 static void
611 enqueue_eh_entry (queue, entry)
612 struct eh_queue *queue;
613 struct eh_entry *entry;
614 {
615 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
616
617 node->entry = entry;
618 node->chain = NULL;
619
620 if (queue->head == NULL)
621 {
622 queue->head = node;
623 }
624 else
625 {
626 queue->tail->chain = node;
627 }
628 queue->tail = node;
629 }
630
631 /* Dequeue an entry from the given QUEUE. */
632
633 static struct eh_entry *
634 dequeue_eh_entry (queue)
635 struct eh_queue *queue;
636 {
637 struct eh_node *tempnode;
638 struct eh_entry *tempentry;
639
640 if (queue->head == NULL)
641 return NULL;
642
643 tempnode = queue->head;
644 queue->head = queue->head->chain;
645
646 tempentry = tempnode->entry;
647 free (tempnode);
648
649 return tempentry;
650 }
651 \f
652 /* Routine to see if exception handling is turned on.
653 DO_WARN is non-zero if we want to inform the user that exception
654 handling is turned off.
655
656 This is used to ensure that -fexceptions has been specified if the
657 compiler tries to use any exception-specific functions. */
658
659 int
660 doing_eh (do_warn)
661 int do_warn;
662 {
663 if (! flag_exceptions)
664 {
665 static int warned = 0;
666 if (! warned && do_warn)
667 {
668 error ("exception handling disabled, use -fexceptions to enable");
669 warned = 1;
670 }
671 return 0;
672 }
673 return 1;
674 }
675
676 /* Given a return address in ADDR, determine the address we should use
677 to find the corresponding EH region. */
678
679 rtx
680 eh_outer_context (addr)
681 rtx addr;
682 {
683 /* First mask out any unwanted bits. */
684 #ifdef MASK_RETURN_ADDR
685 expand_and (addr, MASK_RETURN_ADDR, addr);
686 #endif
687
688 /* Then adjust to find the real return address. */
689 #if defined (RETURN_ADDR_OFFSET)
690 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
691 #endif
692
693 return addr;
694 }
695
696 /* Start a new exception region for a region of code that has a
697 cleanup action and push the HANDLER for the region onto
698 protect_list. All of the regions created with add_partial_entry
699 will be ended when end_protect_partials is invoked. */
700
701 void
702 add_partial_entry (handler)
703 tree handler;
704 {
705 expand_eh_region_start ();
706
707 /* Make sure the entry is on the correct obstack. */
708 push_obstacks_nochange ();
709 resume_temporary_allocation ();
710
711 /* Because this is a cleanup action, we may have to protect the handler
712 with __terminate. */
713 handler = protect_with_terminate (handler);
714
715 protect_list = tree_cons (NULL_TREE, handler, protect_list);
716 pop_obstacks ();
717 }
718
719 /* Get a reference to the dynamic handler chain. It points to the
720 pointer to the next element in the dynamic handler chain. It ends
721 when there are no more elements in the dynamic handler chain, when
722 the value is &top_elt from libgcc2.c. Immediately after the
723 pointer is an area suitable for setjmp/longjmp when
724 DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
725 __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
726 isn't defined.
727
728 This routine is here to facilitate the porting of this code to
729 systems with threads. One can either replace the routine we emit a
730 call for here in libgcc2.c, or one can modify this routine to work
731 with their thread system. */
732
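/* As a sketch, each element on the dynamic handler chain is laid out
   roughly as follows (the layout is established by
   start_dynamic_handler below and consumed by __sjthrow in libgcc2.c;
   the element is just a block of stack space):

	word 0:   pointer to the previous chain element
	word 1:   head of this element's dynamic cleanup chain
	word 2..: the setjmp or __builtin_setjmp buffer  */
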
733 rtx
734 get_dynamic_handler_chain ()
735 {
736 #if 0
737 /* Do this once we figure out how to get this to the front of the
738 function, and we really only want one per real function, not one
739 per inlined function. */
740 if (current_function_dhc == 0)
741 {
742 rtx dhc, insns;
743 start_sequence ();
744
745 /* ... */
746 insns = get_insns ();
747 end_sequence ();
748 emit_insns_before (insns, get_first_nonparm_insn ());
749 }
750 /* We don't want a copy of the dhc, but rather, the single dhc. */
751 return gen_rtx (MEM, Pmode, current_function_dhc);
752 #endif
753
754 static tree fn;
755 tree expr;
756
757 if (fn == NULL_TREE)
758 {
759 tree fntype;
760 fn = get_identifier ("__get_dynamic_handler_chain");
761 push_obstacks_nochange ();
762 end_temporary_allocation ();
763 fntype = build_pointer_type (build_pointer_type
764 (build_pointer_type (void_type_node)));
765 fntype = build_function_type (fntype, NULL_TREE);
766 fn = build_decl (FUNCTION_DECL, fn, fntype);
767 DECL_EXTERNAL (fn) = 1;
768 TREE_PUBLIC (fn) = 1;
769 DECL_ARTIFICIAL (fn) = 1;
770 TREE_READONLY (fn) = 1;
771 make_decl_rtl (fn, NULL_PTR, 1);
772 assemble_external (fn);
773 pop_obstacks ();
774 }
775
776 expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
777 expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
778 expr, NULL_TREE, NULL_TREE);
779 TREE_SIDE_EFFECTS (expr) = 1;
780 expr = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (expr)), expr);
781
782 return expand_expr (expr, NULL_RTX, VOIDmode, 0);
783 }
784
785 /* Get a reference to the dynamic cleanup chain. It points to the
786 pointer to the next element in the dynamic cleanup chain.
787 Immediately after the pointer are two Pmode variables: one for a
788 pointer to a function that performs the cleanup action, and the
789 second, the argument to pass to that function. */
790
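/* A sketch of one element on the dynamic cleanup chain (see
   start_dynamic_cleanup below; the element is just three words of
   stack space):

	word 0:  pointer to the previous cleanup-chain element
	word 1:  the cleanup function to call
	word 2:  the argument to pass to that function  */
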
791 rtx
792 get_dynamic_cleanup_chain ()
793 {
794 rtx dhc, dcc;
795
796 dhc = get_dynamic_handler_chain ();
797 dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
798
799 current_function_dcc = copy_to_reg (dcc);
800
801 /* We don't want a copy of the dcc, but rather, the single dcc. */
802 return gen_rtx (MEM, Pmode, current_function_dcc);
803 }
804
805 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
806 LABEL is an rtx of code CODE_LABEL, in this function. */
807
808 void
809 jumpif_rtx (x, label)
810 rtx x;
811 rtx label;
812 {
813 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
814 }
815
816 /* Generate code to evaluate X and jump to LABEL if the value is zero.
817 LABEL is an rtx of code CODE_LABEL, in this function. */
818
819 void
820 jumpifnot_rtx (x, label)
821 rtx x;
822 rtx label;
823 {
824 jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
825 }
826
827 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
828 We just need to create an element for the cleanup list, and push it
829 into the chain.
830
831 A dynamic cleanup is a cleanup action implied by the presence of an
832 element on the EH runtime dynamic cleanup stack that is to be
833 performed when an exception is thrown. The cleanup action is
834 performed by __sjthrow when an exception is thrown. Only certain
835 actions can be optimized into dynamic cleanup actions. For the
836 restrictions on what actions can be performed using this routine,
837 see expand_eh_region_start_tree. */
838
839 static void
840 start_dynamic_cleanup (func, arg)
841 tree func;
842 tree arg;
843 {
844 rtx dhc, dcc;
845 rtx new_func, new_arg;
846 rtx x, buf;
847 int size;
848
849 /* We allocate enough room for a pointer to the function, and
850 one argument. */
851 size = 2;
852
853 /* XXX, FIXME: The stack space allocated this way is too long lived,
854 but there is no allocation routine that allocates at the level of
855 the last binding contour. */
856 buf = assign_stack_local (BLKmode,
857 GET_MODE_SIZE (Pmode)*(size+1),
858 0);
859
860 buf = change_address (buf, Pmode, NULL_RTX);
861
862 /* Store dcc into the first word of the newly allocated buffer. */
863
864 dcc = get_dynamic_cleanup_chain ();
865 emit_move_insn (buf, dcc);
866
867 /* Store func and arg into the cleanup list element. */
868
869 new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
870 GET_MODE_SIZE (Pmode)));
871 new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
872 GET_MODE_SIZE (Pmode)*2));
873 x = expand_expr (func, new_func, Pmode, 0);
874 if (x != new_func)
875 emit_move_insn (new_func, x);
876
877 x = expand_expr (arg, new_arg, Pmode, 0);
878 if (x != new_arg)
879 emit_move_insn (new_arg, x);
880
881 /* Update the cleanup chain. */
882
883 emit_move_insn (dcc, XEXP (buf, 0));
884 }
885
886 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
887 handler stack. This should only be used by expand_eh_region_start
888 or expand_eh_region_start_tree. */
889
890 static void
891 start_dynamic_handler ()
892 {
893 rtx dhc, dcc;
894 rtx x, arg, buf;
895 int size;
896
897 #ifndef DONT_USE_BUILTIN_SETJMP
898 /* The number of Pmode words for the setjmp buffer, when using the
899 builtin setjmp/longjmp, see expand_builtin, case
900 BUILT_IN_LONGJMP. */
901 size = 5;
902 #else
903 #ifdef JMP_BUF_SIZE
904 size = JMP_BUF_SIZE;
905 #else
906 /* Should be large enough for most systems; if it is not,
907 JMP_BUF_SIZE should be defined with the proper value. It will
908 also tend to be larger than necessary for most systems; a more
909 optimal port will define JMP_BUF_SIZE. */
910 size = FIRST_PSEUDO_REGISTER+2;
911 #endif
912 #endif
913 /* XXX, FIXME: The stack space allocated this way is too long lived,
914 but there is no allocation routine that allocates at the level of
915 the last binding contour. */
916 arg = assign_stack_local (BLKmode,
917 GET_MODE_SIZE (Pmode)*(size+1),
918 0);
919
920 arg = change_address (arg, Pmode, NULL_RTX);
921
922 /* Store dhc into the first word of the newly allocated buffer. */
923
924 dhc = get_dynamic_handler_chain ();
925 dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
926 GET_MODE_SIZE (Pmode)));
927 emit_move_insn (arg, dhc);
928
929 /* Zero out the start of the cleanup chain. */
930 emit_move_insn (dcc, const0_rtx);
931
932 /* The jmpbuf starts two words into the area allocated. */
933 buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
934
935 #ifdef DONT_USE_BUILTIN_SETJMP
936 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
937 buf, Pmode);
938 #else
939 x = expand_builtin_setjmp (buf, NULL_RTX);
940 #endif
941
942 /* If we come back here for a catch, transfer control to the
943 handler. */
944
945 jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
946
947 /* We are committed to this, so update the handler chain. */
948
949 emit_move_insn (dhc, XEXP (arg, 0));
950 }
951
952 /* Start an exception handling region for the given cleanup action.
953 All instructions emitted after this point are considered to be part
954 of the region until expand_eh_region_end is invoked. CLEANUP is
955 the cleanup action to perform. The return value is true if the
956 exception region was optimized away. In that case,
957 expand_eh_region_end does not need to be called for this cleanup,
958 nor should it be.
959
960 This routine notices one particular common case in C++ code
961 generation, and optimizes it so as to not need the exception
962 region. It works by creating a dynamic cleanup action, instead of
963 using an exception region. */
964
965 int
966 expand_eh_region_start_tree (decl, cleanup)
967 tree decl;
968 tree cleanup;
969 {
970 rtx note;
971
972 /* This is the old code. */
973 if (! doing_eh (0))
974 return 0;
975
976 /* The optimization only applies to actions protected with
977 terminate, and only applies if we are using the setjmp/longjmp
978 codegen method. */
979 if (exceptions_via_longjmp
980 && protect_cleanup_actions_with_terminate)
981 {
982 tree func, arg;
983 tree args;
984
985 /* Ignore any UNSAVE_EXPR. */
986 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
987 cleanup = TREE_OPERAND (cleanup, 0);
988
989 /* Further, it only applies if the action is a call, if there
990 are 2 arguments, and if the second argument is 2. */
991
992 if (TREE_CODE (cleanup) == CALL_EXPR
993 && (args = TREE_OPERAND (cleanup, 1))
994 && (func = TREE_OPERAND (cleanup, 0))
995 && (arg = TREE_VALUE (args))
996 && (args = TREE_CHAIN (args))
997
998 /* is the second argument 2? */
999 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
1000 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
1001 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
1002
1003 /* Make sure there are no other arguments. */
1004 && TREE_CHAIN (args) == NULL_TREE)
1005 {
1006 /* Arrange for returns and gotos to pop the entry we make on the
1007 dynamic cleanup stack. */
1008 expand_dcc_cleanup (decl);
1009 start_dynamic_cleanup (func, arg);
1010 return 1;
1011 }
1012 }
1013
1014 expand_eh_region_start_for_decl (decl);
1015 ehstack.top->entry->finalization = cleanup;
1016
1017 return 0;
1018 }
1019
1020 /* Just like expand_eh_region_start, except if a cleanup action is
1021 entered on the cleanup chain, the TREE_PURPOSE of the element put
1022 on the chain is DECL. DECL should be the associated VAR_DECL, if
1023 any, otherwise it should be NULL_TREE. */
1024
1025 void
1026 expand_eh_region_start_for_decl (decl)
1027 tree decl;
1028 {
1029 rtx note;
1030
1031 /* This is the old code. */
1032 if (! doing_eh (0))
1033 return;
1034
1035 if (exceptions_via_longjmp)
1036 {
1037 /* We need a new block to record the start and end of the
1038 dynamic handler chain. We could always do this, but we
1039 really want to permit jumping into such a block, and we want
1040 to avoid any errors or performance impact in the SJ EH code
1041 for now. */
1042 expand_start_bindings (0);
1043
1044 /* But we don't need or want a new temporary level. */
1045 pop_temp_slots ();
1046
1047 /* Mark this block as created by expand_eh_region_start. This
1048 is so that we can pop the block with expand_end_bindings
1049 automatically. */
1050 mark_block_as_eh_region ();
1051
1052 /* Arrange for returns and gotos to pop the entry we make on the
1053 dynamic handler stack. */
1054 expand_dhc_cleanup (decl);
1055 }
1056
1057 push_eh_entry (&ehstack);
1058 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
1059 NOTE_BLOCK_NUMBER (note)
1060 = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
1061 if (exceptions_via_longjmp)
1062 start_dynamic_handler ();
1063 }
1064
1065 /* Start an exception handling region. All instructions emitted after
1066 this point are considered to be part of the region until
1067 expand_eh_region_end is invoked. */
1068
1069 void
1070 expand_eh_region_start ()
1071 {
1072 expand_eh_region_start_for_decl (NULL_TREE);
1073 }
1074
1075 /* End an exception handling region. The information about the region
1076 is found on the top of ehstack.
1077
1078 HANDLER is either the cleanup for the exception region, or if we're
1079 marking the end of a try block, HANDLER is integer_zero_node.
1080
1081 HANDLER will be transformed to rtl when expand_leftover_cleanups
1082 is invoked. */
1083
1084 void
1085 expand_eh_region_end (handler)
1086 tree handler;
1087 {
1088 struct eh_entry *entry;
1089 rtx note;
1090
1091 if (! doing_eh (0))
1092 return;
1093
1094 entry = pop_eh_entry (&ehstack);
1095
1096 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
1097 NOTE_BLOCK_NUMBER (note)
1098 = CODE_LABEL_NUMBER (entry->exception_handler_label);
1099 if (exceptions_via_longjmp == 0
1100 /* We share outer_context between regions; only emit it once. */
1101 && INSN_UID (entry->outer_context) == 0)
1102 {
1103 rtx label;
1104
1105 label = gen_label_rtx ();
1106 emit_jump (label);
1107
1108 /* Emit a label marking the end of this exception region that
1109 is used for rethrowing into the outer context. */
1110 emit_label (entry->outer_context);
1111 expand_internal_throw ();
1112
1113 emit_label (label);
1114 }
1115
1116 entry->finalization = handler;
1117
1118 enqueue_eh_entry (&ehqueue, entry);
1119
1120 /* If we have already started ending the bindings, don't recurse.
1121 This only happens when exceptions_via_longjmp is true. */
1122 if (is_eh_region ())
1123 {
1124 /* Because we don't need or want a new temporary level and
1125 because we didn't create one in expand_eh_region_start,
1126 create a fake one now to avoid removing one in
1127 expand_end_bindings. */
1128 push_temp_slots ();
1129
1130 mark_block_as_not_eh_region ();
1131
1132 /* Maybe do this to prevent jumping in and so on... */
1133 expand_end_bindings (NULL_TREE, 0, 0);
1134 }
1135 }
1136
1137 /* Start an EH region for a goto fixup. We only need them in the region-based
1138 EH scheme. */
1139
1140 void
1141 expand_fixup_region_start ()
1142 {
1143 if (! doing_eh (0) || exceptions_via_longjmp)
1144 return;
1145
1146 expand_eh_region_start ();
1147 }
1148
1149 /* End the EH region for a goto fixup. CLEANUP is the cleanup we just
1150 expanded; to avoid running it twice if it throws, we look through the
1151 ehqueue for a matching region and rethrow from its outer_context. */
1152
1153 void
1154 expand_fixup_region_end (cleanup)
1155 tree cleanup;
1156 {
1157 struct eh_node *node;
1158
1159 if (! doing_eh (0) || exceptions_via_longjmp)
1160 return;
1161
1162 for (node = ehstack.top; node && node->entry->finalization != cleanup; )
1163 node = node->chain;
1164 if (node == 0)
1165 for (node = ehqueue.head; node && node->entry->finalization != cleanup; )
1166 node = node->chain;
1167 if (node == 0)
1168 abort ();
1169
1170 ehstack.top->entry->outer_context = node->entry->outer_context;
1171
1172 /* Just rethrow. size_zero_node is just a NOP. */
1173 expand_eh_region_end (size_zero_node);
1174 }
1175
1176 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1177 call to __sjthrow.
1178
1179 Otherwise, we emit a call to __throw and note that we threw
1180 something, so we know we need to generate the necessary code for
1181 __throw.
1182
1183 Before invoking __throw, the __eh_pc variable must have been set up
1184 to contain the PC being thrown from. This address is used by
1185 __throw to determine which exception region (if any) is
1186 responsible for handling the exception. */
1187
1188 void
1189 emit_throw ()
1190 {
1191 if (exceptions_via_longjmp)
1192 {
1193 emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
1194 }
1195 else
1196 {
1197 #ifdef JUMP_TO_THROW
1198 emit_indirect_jump (throw_libfunc);
1199 #else
1200 #ifndef DWARF2_UNWIND_INFO
1201 /* Prevent assemble_external from doing anything with this symbol. */
1202 SYMBOL_REF_USED (throw_libfunc) = 1;
1203 #endif
1204 emit_library_call (throw_libfunc, 0, VOIDmode, 0);
1205 #endif
1206 throw_used = 1;
1207 }
1208 emit_barrier ();
1209 }
1210
1211 /* Throw the current exception. If appropriate, this is done by jumping
1212 to the next handler. */
1213
1214 void
1215 expand_internal_throw ()
1216 {
1217 #ifndef DWARF2_UNWIND_INFO
1218 if (! exceptions_via_longjmp)
1219 {
1220 rtx label = gen_label_rtx ();
1221 emit_label (label);
1222 label = gen_rtx (LABEL_REF, Pmode, label);
1223 assemble_external (eh_saved_pc);
1224 emit_move_insn (eh_saved_pc_rtx, label);
1225 }
1226 #endif
1227 emit_throw ();
1228 }
1229
1230 /* Called from expand_exception_blocks and expand_end_catch_block to
1231 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1232
1233 void
1234 expand_leftover_cleanups ()
1235 {
1236 struct eh_entry *entry;
1237
1238 while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
1239 {
1240 rtx prev;
1241
1242 /* A leftover try block. Shouldn't be one here. */
1243 if (entry->finalization == integer_zero_node)
1244 abort ();
1245
1246 /* Output the label for the start of the exception handler. */
1247 emit_label (entry->exception_handler_label);
1248
1249 #ifdef HAVE_exception_receiver
1250 if (! exceptions_via_longjmp)
1251 if (HAVE_exception_receiver)
1252 emit_insn (gen_exception_receiver ());
1253 #endif
1254
1255 #ifdef HAVE_nonlocal_goto_receiver
1256 if (! exceptions_via_longjmp)
1257 if (HAVE_nonlocal_goto_receiver)
1258 emit_insn (gen_nonlocal_goto_receiver ());
1259 #endif
1260
1261 /* And now generate the insns for the handler. */
1262 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1263
1264 prev = get_last_insn ();
1265 if (prev == NULL || GET_CODE (prev) != BARRIER)
1266 /* Emit code to throw to the outer context if we fall off
1267 the end of the handler. */
1268 expand_rethrow (entry->outer_context);
1269
1270 do_pending_stack_adjust ();
1271 free (entry);
1272 }
1273 }
1274
1275 /* Called at the start of a block of try statements. */
1276 void
1277 expand_start_try_stmts ()
1278 {
1279 if (! doing_eh (1))
1280 return;
1281
1282 expand_eh_region_start ();
1283 }
1284
1285 /* Generate RTL for the start of a group of catch clauses.
1286
1287 It is responsible for starting a new instruction sequence for the
1288 instructions in the catch block, and expanding the handlers for the
1289 internally-generated exception regions nested within the try block
1290 corresponding to this catch block. */
1291
1292 void
1293 expand_start_all_catch ()
1294 {
1295 struct eh_entry *entry;
1296 tree label;
1297 rtx outer_context;
1298
1299 if (! doing_eh (1))
1300 return;
1301
1302 outer_context = ehstack.top->entry->outer_context;
1303
1304 /* End the try block. */
1305 expand_eh_region_end (integer_zero_node);
1306
1307 emit_line_note (input_filename, lineno);
1308 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1309
1310 /* The label for the exception handling block that we will save.
1311 This is Lresume in the documentation. */
1312 expand_label (label);
1313
1314 /* Push the label that points to where normal flow is resumed onto
1315 the top of the label stack. */
1316 push_label_entry (&caught_return_label_stack, NULL_RTX, label);
1317
1318 /* Start a new sequence for all the catch blocks. We will add this
1319 to the global sequence catch_clauses when we have completed all
1320 the handlers in this handler-seq. */
1321 start_sequence ();
1322
1323 while (1)
1324 {
1325 rtx prev;
1326
1327 entry = dequeue_eh_entry (&ehqueue);
1328 /* Emit the label for the exception handler for this region, and
1329 expand the code for the handler.
1330
1331 Note that a catch region is handled as a side-effect here;
1332 for a try block, entry->finalization will contain
1333 integer_zero_node, so no code will be generated in the
1334 expand_expr call below. But, the label for the handler will
1335 still be emitted, so any code emitted after this point will
1336 end up being the handler. */
1337 emit_label (entry->exception_handler_label);
1338
1339 #ifdef HAVE_exception_receiver
1340 if (! exceptions_via_longjmp)
1341 if (HAVE_exception_receiver)
1342 emit_insn (gen_exception_receiver ());
1343 #endif
1344
1345 #ifdef HAVE_nonlocal_goto_receiver
1346 if (! exceptions_via_longjmp)
1347 if (HAVE_nonlocal_goto_receiver)
1348 emit_insn (gen_nonlocal_goto_receiver ());
1349 #endif
1350
1351 /* When we get down to the matching entry for this try block, stop. */
1352 if (entry->finalization == integer_zero_node)
1353 {
1354 /* Don't forget to free this entry. */
1355 free (entry);
1356 break;
1357 }
1358
1359 /* And now generate the insns for the handler. */
1360 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1361
1362 prev = get_last_insn ();
1363 if (prev == NULL || GET_CODE (prev) != BARRIER)
1364 /* Code to throw out to outer context when we fall off end
1365 of the handler. We can't do this here for catch blocks,
1366 so it's done in expand_end_all_catch instead. */
1367 expand_rethrow (entry->outer_context);
1368
1369 do_pending_stack_adjust ();
1370 free (entry);
1371 }
1372
1373 /* If we are not doing setjmp/longjmp EH, we arrange to rethrow in
1374 the outer context, since this code is reordered out of line. We
1375 need to do this because we are not physically within the region,
1376 if any, that logically contains this catch block. */
1377 if (! exceptions_via_longjmp)
1378 {
1379 expand_eh_region_start ();
1380 ehstack.top->entry->outer_context = outer_context;
1381 }
1382 }
1383
1384 /* Finish up the catch block. At this point all the insns for the
1385 catch clauses have already been generated, so we only have to add
1386 them to the catch_clauses list. We also want to make sure that if
1387 we fall off the end of the catch clauses that we rethrow to the
1388 outer EH region. */
1389
1390 void
1391 expand_end_all_catch ()
1392 {
1393 rtx new_catch_clause, outer_context;
1394
1395 if (! doing_eh (1))
1396 return;
1397
1398 outer_context = ehstack.top->entry->outer_context;
1399 if (! exceptions_via_longjmp)
1400 /* Finish the rethrow region. size_zero_node is just a NOP. */
1401 expand_eh_region_end (size_zero_node);
1402
1403 /* Code to throw out to outer context, if we fall off end of catch
1404 handlers. This is rethrow (Lresume, same id, same obj) in the
1405 documentation. We use Lresume because we know that it will throw
1406 to the correct context.
1407
1408 In other words, if the catch handler doesn't exit or return, we
1409 do a "throw" (using the address of Lresume as the point being
1410 thrown from) so that the outer EH region can then try to process
1411 the exception. */
1412 expand_rethrow (outer_context);
1413
1414 /* Now we have the complete catch sequence. */
1415 new_catch_clause = get_insns ();
1416 end_sequence ();
1417
1418 /* This level of catch blocks is done, so set up the successful
1419 catch jump label for the next layer of catch blocks. */
1420 pop_label_entry (&caught_return_label_stack);
1421
1422 /* Add the new sequence of catches to the main one for this function. */
1423 push_to_sequence (catch_clauses);
1424 emit_insns (new_catch_clause);
1425 catch_clauses = get_insns ();
1426 end_sequence ();
1427
1428 /* Here we fall through into the continuation code. */
1429 }
1430
1431 /* Rethrow from the outer context LABEL. */
1432
1433 static void
1434 expand_rethrow (label)
1435 rtx label;
1436 {
1437 if (exceptions_via_longjmp)
1438 emit_throw ();
1439 else
1440 emit_jump (label);
1441 }
1442
1443 /* End all the pending exception regions on protect_list. The handlers
1444 will be emitted when expand_leftover_cleanups is invoked. */
1445
1446 void
1447 end_protect_partials ()
1448 {
1449 while (protect_list)
1450 {
1451 expand_eh_region_end (TREE_VALUE (protect_list));
1452 protect_list = TREE_CHAIN (protect_list);
1453 }
1454 }
1455
1456 /* Arrange for __terminate to be called if there is an unhandled throw
1457 from within E. */
1458
1459 tree
1460 protect_with_terminate (e)
1461 tree e;
1462 {
1463 /* We only need to do this when using setjmp/longjmp EH and the
1464 language requires it, as otherwise we protect all of the handlers
1465 at once, if we need to. */
1466 if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
1467 {
1468 tree handler, result;
1469
1470 /* All cleanups must be on the function_obstack. */
1471 push_obstacks_nochange ();
1472 resume_temporary_allocation ();
1473
1474 handler = make_node (RTL_EXPR);
1475 TREE_TYPE (handler) = void_type_node;
1476 RTL_EXPR_RTL (handler) = const0_rtx;
1477 TREE_SIDE_EFFECTS (handler) = 1;
1478 start_sequence_for_rtl_expr (handler);
1479
1480 emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
1481 emit_barrier ();
1482
1483 RTL_EXPR_SEQUENCE (handler) = get_insns ();
1484 end_sequence ();
1485
1486 result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
1487 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
1488 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
1489 TREE_READONLY (result) = TREE_READONLY (e);
1490
1491 pop_obstacks ();
1492
1493 e = result;
1494 }
1495
1496 return e;
1497 }
1498 \f
1499 /* The exception table that we build, used for looking up and
1500 dispatching exceptions; the current number of entries; and its
1501 maximum size before we have to extend it.
1502
1503 The number in eh_table is the code label number of the exception
1504 handler for the region. This is added by add_eh_table_entry and
1505 used by output_exception_table_entry. */
1506
1507 static int *eh_table;
1508 static int eh_table_size;
1509 static int eh_table_max_size;
1510
1511 /* Note the need for an exception table entry for region N. If we
1512 don't need to output an explicit exception table, avoid all of the
1513 extra work.
1514
1515 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1516 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1517 label number of the exception handler for the region. */
1518
1519 void
1520 add_eh_table_entry (n)
1521 int n;
1522 {
1523 #ifndef OMIT_EH_TABLE
1524 if (eh_table_size >= eh_table_max_size)
1525 {
1526 if (eh_table)
1527 {
1528 eh_table_max_size += eh_table_max_size>>1;
1529
1530 if (eh_table_max_size < 0)
1531 abort ();
1532
1533 eh_table = (int *) xrealloc (eh_table,
1534 eh_table_max_size * sizeof (int));
1535 }
1536 else
1537 {
1538 eh_table_max_size = 252;
1539 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1540 }
1541 }
1542 eh_table[eh_table_size++] = n;
1543 #endif
1544 }
1545
1546 /* Return a non-zero value if we need to output an exception table.
1547
1548 On some platforms, we don't have to output a table explicitly;
1549 even then, a table may still exist. */
1550
1551 int
1552 exception_table_p ()
1553 {
1554 if (eh_table)
1555 return 1;
1556
1557 return 0;
1558 }
1559
1560 /* 1 if we need a static constructor to register EH table info. */
1561
1562 int
1563 register_exception_table_p ()
1564 {
1565 #if defined (DWARF2_UNWIND_INFO)
1566 return 0;
1567 #endif
1568
1569 return exception_table_p ();
1570 }
1571
1572 /* Output the entry of the exception table corresponding to the
1573 exception region numbered N to file FILE.
1574
1575 N is the code label number corresponding to the handler of the
1576 region. */
1577
1578 static void
1579 output_exception_table_entry (file, n)
1580 FILE *file;
1581 int n;
1582 {
1583 char buf[256];
1584 rtx sym;
1585
1586 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
1587 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1588 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1589
1590 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
1591 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1592 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1593
1594 ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
1595 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1596 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1597
1598 putc ('\n', file); /* blank line */
1599 }
1600
1601 /* Output the exception table, if we have one and need it. */
1602
1603 void
1604 output_exception_table ()
1605 {
1606 int i;
1607 extern FILE *asm_out_file;
1608
1609 if (! doing_eh (0) || ! eh_table)
1610 return;
1611
1612 exception_section ();
1613
1614 /* Beginning marker for table. */
1615 assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
1616 assemble_label ("__EXCEPTION_TABLE__");
1617
1618 for (i = 0; i < eh_table_size; ++i)
1619 output_exception_table_entry (asm_out_file, eh_table[i]);
1620
1621 free (eh_table);
1622
1623 /* Ending marker for table. */
1624 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1625 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1626 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1627 putc ('\n', asm_out_file); /* blank line */
1628 }
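
/* Illustrative sketch, not part of except.c: the words emitted above form,
   conceptually, an array of {region start, region end, handler} address
   triples that begins at __EXCEPTION_TABLE__ and is terminated by three -1
   words.  A runtime lookup for the context PC of a throw could then look
   roughly like the following; the struct and function names are invented
   for the example, and real dispatch must prefer the innermost region
   containing PC.

     struct eh_range { char *start; char *end; void *handler; };

     static void *
     lookup_handler (struct eh_range *table, char *pc)
     {
       for (; table->start != (char *) -1; ++table)
         if (pc >= table->start && pc < table->end)
           return table->handler;   -- a region whose range contains PC
       return 0;                    -- none in this table; keep unwinding
     }
  */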
1629
1630 /* Generate code to initialize the exception table at program startup
1631 time. */
1632
1633 void
1634 register_exception_table ()
1635 {
1636 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1637 VOIDmode, 1,
1638 gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1639 Pmode);
1640 }
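
/* Illustrative equivalent of the library call emitted above, written as C;
   the prototypes are assumptions made for the sketch, not declarations
   taken from this file:

     extern void __register_exceptions (void *table_start);
     extern char __EXCEPTION_TABLE__[];

     static void
     register_eh_table (void)        -- run from a static constructor
     {
       __register_exceptions (__EXCEPTION_TABLE__);
     }
  */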
1641 \f
1642 /* Emit the RTL for the start of the per-function unwinder for the
1643 current function. See emit_unwinder for further information.
1644
1645 DOESNT_NEED_UNWINDER is a target-specific macro that determines if
1646 the current function actually needs a per-function unwinder or not.
1647 By default, all functions need one. */
1648
1649 void
1650 start_eh_unwinder ()
1651 {
1652 #ifdef DOESNT_NEED_UNWINDER
1653 if (DOESNT_NEED_UNWINDER)
1654 return;
1655 #endif
1656
1657 /* If we are using the setjmp/longjmp implementation, we don't need a
1658 per function unwinder. */
1659
1660 if (exceptions_via_longjmp)
1661 return;
1662
1663 #ifdef DWARF2_UNWIND_INFO
1664 return;
1665 #endif
1666
1667 expand_eh_region_start ();
1668 }
1669
1670 /* Emit insns for the end of the per-function unwinder for the
1671 current function. */
1672
1673 void
1674 end_eh_unwinder ()
1675 {
1676 tree expr;
1677 rtx return_val_rtx, ret_val, label, end, insns;
1678
1679 if (! doing_eh (0))
1680 return;
1681
1682 #ifdef DOESNT_NEED_UNWINDER
1683 if (DOESNT_NEED_UNWINDER)
1684 return;
1685 #endif
1686
1687 /* If we are using the setjmp/longjmp implementation, we don't need a
1688 per function unwinder. */
1689
1690 if (exceptions_via_longjmp)
1691 return;
1692
1693 #ifdef DWARF2_UNWIND_INFO
1694 return;
1695 #else /* DWARF2_UNWIND_INFO */
1696
1697 assemble_external (eh_saved_pc);
1698
1699 expr = make_node (RTL_EXPR);
1700 TREE_TYPE (expr) = void_type_node;
1701 RTL_EXPR_RTL (expr) = const0_rtx;
1702 TREE_SIDE_EFFECTS (expr) = 1;
1703 start_sequence_for_rtl_expr (expr);
1704
1705 /* ret_val will contain the address of the code where the call
1706 to the current function occurred. */
1707 ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
1708 0, hard_frame_pointer_rtx);
1709 return_val_rtx = copy_to_reg (ret_val);
1710
1711 /* Get the address we need to use to determine what exception
1712 handler should be invoked, and store it in __eh_pc. */
1713 return_val_rtx = eh_outer_context (return_val_rtx);
1714 return_val_rtx = expand_binop (Pmode, sub_optab, return_val_rtx, GEN_INT (1),
1715 NULL_RTX, 0, OPTAB_LIB_WIDEN);
1716 emit_move_insn (eh_saved_pc_rtx, return_val_rtx);
1717
1718 /* Either set things up so we do a return directly to __throw, or
1719 we return here instead. */
1720 #ifdef JUMP_TO_THROW
1721 emit_move_insn (ret_val, throw_libfunc);
1722 #else
1723 label = gen_label_rtx ();
1724 emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
1725 #endif
1726
1727 #ifdef RETURN_ADDR_OFFSET
1728 return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
1729 if (return_val_rtx != ret_val)
1730 emit_move_insn (ret_val, return_val_rtx);
1731 #endif
1732
1733 end = gen_label_rtx ();
1734 emit_jump (end);
1735
1736 RTL_EXPR_SEQUENCE (expr) = get_insns ();
1737 end_sequence ();
1738
1739 expand_eh_region_end (expr);
1740
1741 emit_jump (end);
1742
1743 #ifndef JUMP_TO_THROW
1744 emit_label (label);
1745 emit_throw ();
1746 #endif
1747
1748 expand_leftover_cleanups ();
1749
1750 emit_label (end);
1751
1752 #ifdef HAVE_return
1753 if (HAVE_return)
1754 {
1755 emit_jump_insn (gen_return ());
1756 emit_barrier ();
1757 }
1758 #endif
1759 #endif /* DWARF2_UNWIND_INFO */
1760 }
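
/* Illustrative sketch of what the per-function unwinder emitted above does
   at run time.  Assumptions: "outer_context" stands in for whatever
   eh_outer_context computes on the target, and the C below stands in for
   code that is really emitted as RTL:

     void *ret = __builtin_return_address (0);
     __eh_pc = (char *) outer_context (ret) - 1;   -- a PC inside the caller
     -- the saved return address is then replaced so that the normal
     -- epilogue "returns" into __throw (with JUMP_TO_THROW) or into a small
     -- local stub that calls __throw, rethrowing in the caller's context. */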
1761
1762 /* If necessary, emit insns for the per function unwinder for the
1763 current function. Called after all the code that needs unwind
1764 protection is output.
1765
1766 The unwinder takes care of catching any exceptions that have not
1767 been previously caught within the function, unwinding the stack to
1768 the next frame, and rethrowing using the address of the current
1769 function's caller as the context of the throw.
1770
1771 On some platforms __throw can do this by itself (or with the help
1772 of __unwind_function) so the per-function unwinder is
1773 unnecessary.
1774
1775 We cannot place the unwinder into the function until after we know
1776 we are done inlining, as we don't want to have more than one
1777 unwinder per non-inlined function. */
1778
1779 void
1780 emit_unwinder ()
1781 {
1782 rtx insns, insn;
1783
1784 start_sequence ();
1785 start_eh_unwinder ();
1786 insns = get_insns ();
1787 end_sequence ();
1788
1789 /* We place the start of the exception region associated with the
1790 per function unwinder at the top of the function. */
1791 if (insns)
1792 emit_insns_after (insns, get_insns ());
1793
1794 start_sequence ();
1795 end_eh_unwinder ();
1796 insns = get_insns ();
1797 end_sequence ();
1798
1799 /* And we place the end of the exception region before the USE and
1800 CLOBBER insns that may come at the end of the function. */
1801 if (insns == 0)
1802 return;
1803
1804 insn = get_last_insn ();
1805 while (GET_CODE (insn) == NOTE
1806 || (GET_CODE (insn) == INSN
1807 && (GET_CODE (PATTERN (insn)) == USE
1808 || GET_CODE (PATTERN (insn)) == CLOBBER)))
1809 insn = PREV_INSN (insn);
1810
1811 if (GET_CODE (insn) == CODE_LABEL
1812 && GET_CODE (PREV_INSN (insn)) == BARRIER)
1813 {
1814 insn = PREV_INSN (insn);
1815 }
1816 else
1817 {
1818 rtx label = gen_label_rtx ();
1819 emit_label_after (label, insn);
1820 insn = emit_jump_insn_after (gen_jump (label), insn);
1821 insn = emit_barrier_after (insn);
1822 }
1823
1824 emit_insns_after (insns, insn);
1825 }
1826
1827 /* Scan the current insns and build a list of handler labels. The
1828 resulting list is placed in the global variable exception_handler_labels.
1829
1830 It is called after the last exception handling region is added to
1831 the current function (when the rtl is almost all built for the
1832 current function) and before the jump optimization pass. */
1833
1834 void
1835 find_exception_handler_labels ()
1836 {
1837 rtx insn;
1838 int max_labelno = max_label_num ();
1839 int min_labelno = get_first_label_num ();
1840 rtx *labels;
1841
1842 exception_handler_labels = NULL_RTX;
1843
1844 /* If we aren't doing exception handling, there isn't much to check. */
1845 if (! doing_eh (0))
1846 return;
1847
1848 /* Generate a handy reference to each label. */
1849
1850 /* We call xmalloc here instead of alloca; we did the latter in the past,
1851 but found that it can sometimes end up being asked to allocate space
1852 for more than 1 million labels. */
1853 labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
1854 bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
1855
1856 /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER. */
1857 labels -= min_labelno;
1858
1859 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1860 {
1861 if (GET_CODE (insn) == CODE_LABEL)
1862 if (CODE_LABEL_NUMBER (insn) >= min_labelno
1863 && CODE_LABEL_NUMBER (insn) < max_labelno)
1864 labels[CODE_LABEL_NUMBER (insn)] = insn;
1865 }
1866
1867 /* For each start of a region, add its label to the list. */
1868
1869 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1870 {
1871 if (GET_CODE (insn) == NOTE
1872 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1873 {
1874 rtx label = NULL_RTX;
1875
1876 if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
1877 && NOTE_BLOCK_NUMBER (insn) < max_labelno)
1878 {
1879 label = labels[NOTE_BLOCK_NUMBER (insn)];
1880
1881 if (label)
1882 exception_handler_labels
1883 = gen_rtx (EXPR_LIST, VOIDmode,
1884 label, exception_handler_labels);
1885 else
1886 warning ("didn't find handler for EH region %d",
1887 NOTE_BLOCK_NUMBER (insn));
1888 }
1889 else
1890 warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
1891 }
1892 }
1893
1894 free (labels + min_labelno);
1895 }
1896
1897 /* Perform sanity checking on the exception_handler_labels list.
1898
1899 Can be called after find_exception_handler_labels is called to
1900 build the list of exception handlers for the current function and
1901 before we finish processing the current function. */
1902
1903 void
1904 check_exception_handler_labels ()
1905 {
1906 rtx insn, handler;
1907
1908 /* If we aren't doing exception handling, there isn't much to check. */
1909 if (! doing_eh (0))
1910 return;
1911
1912 /* Ensure that each label on exception_handler_labels appears in the
1913 insn chain as a CODE_LABEL with the matching CODE_LABEL_NUMBER, and
1914 that it is the very same label rtx. */
1915
1916 for (handler = exception_handler_labels;
1917 handler;
1918 handler = XEXP (handler, 1))
1919 {
1920 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1921 {
1922 if (GET_CODE (insn) == CODE_LABEL)
1923 {
1924 if (CODE_LABEL_NUMBER (insn)
1925 == CODE_LABEL_NUMBER (XEXP (handler, 0)))
1926 {
1927 if (insn != XEXP (handler, 0))
1928 warning ("mismatched handler %d",
1929 CODE_LABEL_NUMBER (insn));
1930 break;
1931 }
1932 }
1933 }
1934 if (insn == NULL_RTX)
1935 warning ("handler not found %d",
1936 CODE_LABEL_NUMBER (XEXP (handler, 0)));
1937 }
1938
1939 /* Now go through and make sure that for each region there is a
1940 corresponding label. */
1941 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1942 {
1943 if (GET_CODE (insn) == NOTE
1944 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1945 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1946 {
1947 for (handler = exception_handler_labels;
1948 handler;
1949 handler = XEXP (handler, 1))
1950 {
1951 if (CODE_LABEL_NUMBER (XEXP (handler, 0))
1952 == NOTE_BLOCK_NUMBER (insn))
1953 break;
1954 }
1955 if (handler == NULL_RTX)
1956 warning ("region exists, no handler %d",
1957 NOTE_BLOCK_NUMBER (insn));
1958 }
1959 }
1960 }
1961 \f
1962 /* This group of functions initializes the exception handling data
1963 structures at the start of the compilation, initializes the data
1964 structures at the start of a function, and saves and restores the
1965 exception handling data structures for the start/end of a nested
1966 function. */
1967
1968 /* Toplevel initialization for EH things. */
1969
1970 void
1971 init_eh ()
1972 {
1973 /* Generate rtl to reference the variable in which the PC of the
1974 current context is saved. */
1975 tree type = build_pointer_type (make_node (VOID_TYPE));
1976
1977 #ifndef DWARF2_UNWIND_INFO
1978 eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
1979 DECL_EXTERNAL (eh_saved_pc) = 1;
1980 TREE_PUBLIC (eh_saved_pc) = 1;
1981 make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
1982 eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
1983 #endif
1984 }
1985
1986 /* Initialize the per-function EH information. */
1987
1988 void
1989 init_eh_for_function ()
1990 {
1991 ehstack.top = 0;
1992 ehqueue.head = ehqueue.tail = 0;
1993 catch_clauses = NULL_RTX;
1994 false_label_stack = 0;
1995 caught_return_label_stack = 0;
1996 protect_list = NULL_TREE;
1997 current_function_dhc = NULL_RTX;
1998 current_function_dcc = NULL_RTX;
1999 }
2000
2001 /* Save some of the per-function EH info into the save area denoted by
2002 P.
2003
2004 This is currently called from save_stmt_status. */
2005
2006 void
2007 save_eh_status (p)
2008 struct function *p;
2009 {
2010 if (p == NULL)
2011 abort ();
2012
2013 p->ehstack = ehstack;
2014 p->ehqueue = ehqueue;
2015 p->catch_clauses = catch_clauses;
2016 p->false_label_stack = false_label_stack;
2017 p->caught_return_label_stack = caught_return_label_stack;
2018 p->protect_list = protect_list;
2019 p->dhc = current_function_dhc;
2020 p->dcc = current_function_dcc;
2021
2022 init_eh ();
2023 }
2024
2025 /* Restore the per-function EH info saved into the area denoted by P.
2026
2027 This is currently called from restore_stmt_status. */
2028
2029 void
2030 restore_eh_status (p)
2031 struct function *p;
2032 {
2033 if (p == NULL)
2034 abort ();
2035
2036 protect_list = p->protect_list;
2037 caught_return_label_stack = p->caught_return_label_stack;
2038 false_label_stack = p->false_label_stack;
2039 catch_clauses = p->catch_clauses;
2040 ehqueue = p->ehqueue;
2041 ehstack = p->ehstack;
2042 current_function_dhc = p->dhc;
2043 current_function_dcc = p->dcc;
2044 }
2045 \f
2046 /* This section is for the exception handling specific optimization
2047 pass. First are the internal routines, and then the main
2048 optimization pass. */
2049
2050 /* Determine if the given INSN can throw an exception. */
2051
2052 static int
2053 can_throw (insn)
2054 rtx insn;
2055 {
2056 /* Calls can always potentially throw exceptions. */
2057 if (GET_CODE (insn) == CALL_INSN)
2058 return 1;
2059
2060 if (asynchronous_exceptions)
2061 {
2062 /* If we wanted asynchronous exceptions, then everything but NOTEs
2063 and CODE_LABELs could throw. */
2064 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2065 return 1;
2066 }
2067
2068 return 0;
2069 }
2070
2071 /* Scan an exception region looking for the matching end and then
2072 remove it if possible. INSN is the start of the region, N is the
2073 region number, and DELETE_OUTER is used to note whether anything in
2074 this region can throw.
2075
2076 Regions are removed if they cannot possibly catch an exception.
2077 This is determined by invoking can_throw on each insn within the
2078 region; if can_throw returns true for any of the instructions, the
2079 region can catch an exception, since there is an insn within the
2080 region that is capable of throwing an exception.
2081
2082 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2083 calls abort if it can't find one.
2084
2085 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEG note, or if N doesn't
2086 correspond to the region number, or if DELETE_OUTER is NULL. */
2087
2088 static rtx
2089 scan_region (insn, n, delete_outer)
2090 rtx insn;
2091 int n;
2092 int *delete_outer;
2093 {
2094 rtx start = insn;
2095
2096 /* Assume we can delete the region. */
2097 int delete = 1;
2098
2099 if (insn == NULL_RTX
2100 || GET_CODE (insn) != NOTE
2101 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
2102 || NOTE_BLOCK_NUMBER (insn) != n
2103 || delete_outer == NULL)
2104 abort ();
2105
2106 insn = NEXT_INSN (insn);
2107
2108 /* Look for the matching end. */
2109 while (! (GET_CODE (insn) == NOTE
2110 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2111 {
2112 /* If anything can throw, we can't remove the region. */
2113 if (delete && can_throw (insn))
2114 {
2115 delete = 0;
2116 }
2117
2118 /* Watch out for and handle nested regions. */
2119 if (GET_CODE (insn) == NOTE
2120 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2121 {
2122 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
2123 }
2124
2125 insn = NEXT_INSN (insn);
2126 }
2127
2128 /* The _BEG/_END NOTEs must match and nest. */
2129 if (NOTE_BLOCK_NUMBER (insn) != n)
2130 abort ();
2131
2132 /* If anything in this exception region can throw, we can throw. */
2133 if (! delete)
2134 *delete_outer = 0;
2135 else
2136 {
2137 /* Delete the start and end of the region. */
2138 delete_insn (start);
2139 delete_insn (insn);
2140
2141 /* Only do this part if we have built the exception handler
2142 labels. */
2143 if (exception_handler_labels)
2144 {
2145 rtx x, *prev = &exception_handler_labels;
2146
2147 /* Find it in the list of handlers. */
2148 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2149 {
2150 rtx label = XEXP (x, 0);
2151 if (CODE_LABEL_NUMBER (label) == n)
2152 {
2153 /* If we are the last reference to the handler,
2154 delete it. */
2155 if (--LABEL_NUSES (label) == 0)
2156 delete_insn (label);
2157
2158 if (optimize)
2159 {
2160 /* Remove it from the list of exception handler
2161 labels, if we are optimizing. If we are not, then
2162 leave it in the list, as we are not really going to
2163 remove the region. */
2164 *prev = XEXP (x, 1);
2165 XEXP (x, 1) = 0;
2166 XEXP (x, 0) = 0;
2167 }
2168
2169 break;
2170 }
2171 prev = &XEXP (x, 1);
2172 }
2173 }
2174 }
2175 return insn;
2176 }
2177
2178 /* Perform various interesting optimizations for exception handling
2179 code.
2180
2181 We look for empty exception regions and make them go (away). The
2182 jump optimization code will remove the handler if nothing else uses
2183 it. */
2184
2185 void
2186 exception_optimize ()
2187 {
2188 rtx insn, regions = NULL_RTX;
2189 int n;
2190
2191 /* Remove empty regions. */
2192 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2193 {
2194 if (GET_CODE (insn) == NOTE
2195 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2196 {
2197 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2198 insn, we will indirectly skip through all the insns
2199 in between. We are also guaranteed that the value of insn
2200 returned will be valid, as otherwise scan_region won't
2201 return. */
2202 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2203 }
2204 }
2205 }
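
/* Example of what this pass removes (a sketch, not a real RTL dump):

     NOTE_INSN_EH_REGION_BEG 42
       (set (reg) (const_int 1))       -- no calls; nothing here can throw
     NOTE_INSN_EH_REGION_END 42

   scan_region deletes both notes, decrements LABEL_NUSES of handler label
   42 (deleting the label if that was its last use), and, when optimizing,
   drops the label from exception_handler_labels so that later jump
   optimization can delete the handler body as well.  */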
2206 \f
2207 /* Various hooks for the DWARF 2 __throw routine. */
2208
2209 /* Do any necessary initialization to access arbitrary stack frames.
2210 On the SPARC, this means flushing the register windows. */
2211
2212 void
2213 expand_builtin_unwind_init ()
2214 {
2215 /* Set this so all the registers get saved in our frame; we need to be
2216 able to copy the saved values for any registers from frames we unwind. */
2217 current_function_has_nonlocal_label = 1;
2218
2219 #ifdef SETUP_FRAME_ADDRESSES
2220 SETUP_FRAME_ADDRESSES ();
2221 #endif
2222 }
2223
2224 /* Given a value extracted from the return address register or stack slot,
2225 return the actual address encoded in that value. */
2226
2227 rtx
2228 expand_builtin_extract_return_addr (addr_tree)
2229 tree addr_tree;
2230 {
2231 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2232 return eh_outer_context (addr);
2233 }
2234
2235 /* Given an actual address in addr_tree, do any necessary encoding
2236 and return the value to be stored in the return address register or
2237 stack slot so the epilogue will return to that address. */
2238
2239 rtx
2240 expand_builtin_frob_return_addr (addr_tree)
2241 tree addr_tree;
2242 {
2243 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2244 #ifdef RETURN_ADDR_OFFSET
2245 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2246 #endif
2247 return addr;
2248 }
2249
2250 /* Given an actual address in addr_tree, set the return address register up
2251 so the epilogue will return to that address. If the return address is
2252 not in a register, do nothing. */
2253
2254 void
2255 expand_builtin_set_return_addr_reg (addr_tree)
2256 tree addr_tree;
2257 {
2258 rtx tmp;
2259 rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2260 0, hard_frame_pointer_rtx);
2261
2262 if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2263 return;
2264
2265 tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2266 if (tmp != ra)
2267 emit_move_insn (ra, tmp);
2268 }
2269
2270 /* Choose two registers for communication between the main body of
2271 __throw and the stub for adjusting the stack pointer. The first register
2272 is used to pass the address of the exception handler; the second register
2273 is used to pass the stack pointer offset.
2274
2275 For register 1 we use the return value register for a void *.
2276 For register 2 we use the static chain register if it exists and is
2277 different from register 1, otherwise some arbitrary call-clobbered
2278 register. */
2279
2280 static void
2281 eh_regs (r1, r2, outgoing)
2282 rtx *r1, *r2;
2283 int outgoing;
2284 {
2285 rtx reg1, reg2;
2286
2287 #ifdef FUNCTION_OUTGOING_VALUE
2288 if (outgoing)
2289 reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
2290 current_function_decl);
2291 else
2292 #endif
2293 reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2294 current_function_decl);
2295
2296 #ifdef STATIC_CHAIN_REGNUM
2297 if (outgoing)
2298 reg2 = static_chain_incoming_rtx;
2299 else
2300 reg2 = static_chain_rtx;
2301 if (REGNO (reg2) == REGNO (reg1))
2302 #endif /* STATIC_CHAIN_REGNUM */
2303 reg2 = NULL_RTX;
2304
2305 if (reg2 == NULL_RTX)
2306 {
2307 int i;
2308 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2309 if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
2310 {
2311 reg2 = gen_rtx (REG, Pmode, i);
2312 break;
2313 }
2314
2315 if (reg2 == NULL_RTX)
2316 abort ();
2317 }
2318
2319 *r1 = reg1;
2320 *r2 = reg2;
2321 }
2322
2323 /* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2324 to the exception handler. __throw will set up the necessary values
2325 and then return to the stub. */
2326
2327 rtx
2328 expand_builtin_eh_stub ()
2329 {
2330 rtx stub_start = gen_label_rtx ();
2331 rtx after_stub = gen_label_rtx ();
2332 rtx handler, offset, temp;
2333
2334 emit_jump (after_stub);
2335 emit_label (stub_start);
2336
2337 eh_regs (&handler, &offset, 0);
2338
2339 adjust_stack (offset);
2340 emit_indirect_jump (handler);
2341
2342 emit_label (after_stub);
2343 return gen_rtx (LABEL_REF, Pmode, stub_start);
2344 }
2345
2346 /* Set up the registers for passing the handler address and stack offset
2347 to the stub above. */
2348
2349 void
2350 expand_builtin_set_eh_regs (handler, offset)
2351 tree handler, offset;
2352 {
2353 rtx reg1, reg2;
2354
2355 eh_regs (&reg1, &reg2, 1);
2356
2357 store_expr (offset, reg2, 0);
2358 store_expr (handler, reg1, 0);
2359
2360 /* These will be used by the stub. */
2361 emit_insn (gen_rtx (USE, VOIDmode, reg1));
2362 emit_insn (gen_rtx (USE, VOIDmode, reg2));
2363 }
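
/* Illustrative summary of the protocol set up by the two routines above.
   This is a sketch: the builtin names are inferred from the expander names,
   and the C stands in for code that is really emitted as RTL inside
   __throw and the stub:

     -- once __throw has located the handler and the target frame:
     __builtin_set_eh_regs (handler, sp_adjustment);
     -- ...then "return" to the label produced by __builtin_eh_stub (),
     -- where the stub finishes the transfer:
     sp += sp_adjustment;          -- adjust_stack (offset)
     goto *handler;                -- emit_indirect_jump (handler)
  */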