]> gcc.gnu.org Git - gcc.git/blob - gcc/except.c
tree.c (restore_tree_status): Also free up temporary storage when we finish a topleve...
[gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 92-95, 1996 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurs without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46 There are two major codegen options for exception handling. The
47 flag -fsjlj-exceptions can be used to select the setjmp/longjmp
48 approach, which is the default. -fnosjlj-exceptions can be used to
49 get the PC range table approach. While this is a compile time
50 flag, an entire application must be compiled with the same codegen
51 option. The first is a PC range table approach, the second is a
52 setjmp/longjmp based scheme. We will first discuss the PC range
53 table approach, after that, we will discuss the setjmp/longjmp
54 based approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61 Regions of code within a function can be marked such that if it
62 contains the context of a throw, control will be passed to a
63 designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
89
90 If the target does not use the DWARF 2 frame unwind information, at
91 program startup each object file invokes a function named
92 __register_exceptions with the address of its local
93 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94 is responsible for recording all of the exception regions into one list
95 (which is kept in a static variable named exception_table_list).
96
97 On targets that support crtstuff.c, the unwind information
98 is stored in a section named .eh_frame and the information for the
99 entire shared object or program is registered with a call to
100 __register_frame. On other targets, the information for each
101 translation unit is registered from the file generated by collect2.
102 __register_frame is defined in frame.c, and is responsible for
103 recording all of the unwind regions into one list (which is kept in a
104 static variable named unwind_table_list).
105
106 The function __throw is actually responsible for doing the
107 throw. On machines that have unwind info support, __throw is generated
108 by code in libgcc2.c, otherwise __throw is generated on a
109 per-object-file basis for each source file compiled with
-fexceptions by the C++ frontend. Before __throw is invoked,
111 the current context of the throw needs to be placed in the global
112 variable __eh_pc.
113
114 __throw attempts to find the appropriate exception handler for the
115 PC value stored in __eh_pc by calling __find_first_exception_table_match
116 (which is defined in libgcc2.c). If __find_first_exception_table_match
117 finds a relevant handler, __throw transfers control directly to it.
118
119 If a handler for the context being thrown from can't be found, __throw
120 walks (see Walking the stack below) the stack up the dynamic call chain to
121 continue searching for an appropriate exception handler based upon the
caller of the function it last sought an exception handler for. It stops
when either an exception handler is found, or when the top of the
124 call chain is reached.
125
126 If no handler is found, an external library function named
127 __terminate is called. If a handler is found, then we restart
128 our search for a handler at the end of the call chain, and repeat
129 the search process, but instead of just walking up the call chain,
130 we unwind the call chain as we walk up it.
131
132 Internal implementation details:
133
134 To associate a user-defined handler with a block of statements, the
135 function expand_start_try_stmts is used to mark the start of the
136 block of statements with which the handler is to be associated
137 (which is known as a "try block"). All statements that appear
138 afterwards will be associated with the try block.
139
140 A call to expand_start_all_catch marks the end of the try block,
141 and also marks the start of the "catch block" (the user-defined
142 handler) associated with the try block.
143
144 This user-defined handler will be invoked for *every* exception
145 thrown with the context of the try block. It is up to the handler
146 to decide whether or not it wishes to handle any given exception,
147 as there is currently no mechanism in this implementation for doing
148 this. (There are plans for conditionally processing an exception
149 based on its "type", which will provide a language-independent
150 mechanism).
151
152 If the handler chooses not to process the exception (perhaps by
153 looking at an "exception type" or some other additional data
154 supplied with the exception), it can fall through to the end of the
155 handler. expand_end_all_catch and expand_leftover_cleanups
156 add additional code to the end of each handler to take care of
157 rethrowing to the outer exception handler.
158
159 The handler also has the option to continue with "normal flow of
160 code", or in other words to resume executing at the statement
161 immediately after the end of the exception region. The variable
162 caught_return_label_stack contains a stack of labels, and jumping
163 to the topmost entry's label via expand_goto will resume normal
164 flow to the statement immediately after the end of the exception
165 region. If the handler falls through to the end, the exception will
166 be rethrown to the outer exception region.
167
168 The instructions for the catch block are kept as a separate
169 sequence, and will be emitted at the end of the function along with
170 the handlers specified via expand_eh_region_end. The end of the
171 catch block is marked with expand_end_all_catch.
172
173 Any data associated with the exception must currently be handled by
174 some external mechanism maintained in the frontend. For example,
175 the C++ exception mechanism passes an arbitrary value along with
176 the exception, and this is handled in the C++ frontend by using a
177 global variable to hold the value. (This will be changing in the
178 future.)
179
180 The mechanism in C++ for handling data associated with the
181 exception is clearly not thread-safe. For a thread-based
182 environment, another mechanism must be used (possibly using a
183 per-thread allocation mechanism if the size of the area that needs
184 to be allocated isn't known at compile time.)
185
186 Internally-generated exception regions (cleanups) are marked by
187 calling expand_eh_region_start to mark the start of the region,
188 and expand_eh_region_end (handler) is used to both designate the
189 end of the region and to associate a specified handler/cleanup with
190 the region. The rtl code in HANDLER will be invoked whenever an
191 exception occurs in the region between the calls to
192 expand_eh_region_start and expand_eh_region_end. After HANDLER is
193 executed, additional code is emitted to handle rethrowing the
194 exception to the outer exception handler. The code for HANDLER will
195 be emitted at the end of the function.
196
197 TARGET_EXPRs can also be used to designate exception regions. A
198 TARGET_EXPR gives an unwind-protect style interface commonly used
199 in functional languages such as LISP. The associated expression is
200 evaluated, and whether or not it (or any of the functions that it
201 calls) throws an exception, the protect expression is always
202 invoked. This implementation takes care of the details of
203 associating an exception table entry with the expression and
204 generating the necessary code (it actually emits the protect
205 expression twice, once for normal flow and once for the exception
206 case). As for the other handlers, the code for the exception case
207 will be emitted at the end of the function.
208
209 Cleanups can also be specified by using add_partial_entry (handler)
210 and end_protect_partials. add_partial_entry creates the start of
211 a new exception region; HANDLER will be invoked if an exception is
212 thrown with the context of the region between the calls to
213 add_partial_entry and end_protect_partials. end_protect_partials is
214 used to mark the end of these regions. add_partial_entry can be
215 called as many times as needed before calling end_protect_partials.
216 However, end_protect_partials should only be invoked once for each
217 group of calls to add_partial_entry as the entries are queued
218 and all of the outstanding entries are processed simultaneously
219 when end_protect_partials is invoked. Similarly to the other
220 handlers, the code for HANDLER will be emitted at the end of the
221 function.
222
223 The generated RTL for an exception region includes
224 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225 the start and end of the exception region. A unique label is also
226 generated at the start of the exception region, which is available
227 by looking at the ehstack variable. The topmost entry corresponds
228 to the current region.
229
230 In the current implementation, an exception can only be thrown from
231 a function call (since the mechanism used to actually throw an
232 exception involves calling __throw). If an exception region is
233 created but no function calls occur within that region, the region
234 can be safely optimized away (along with its exception handlers)
235 since no exceptions can ever be caught in that region. This
236 optimization is performed unless -fasynchronous-exceptions is
237 given. If the user wishes to throw from a signal handler, or other
238 asynchronous place, -fasynchronous-exceptions should be used when
239 compiling for maximally correct code, at the cost of additional
240 exception regions. Using -fasynchronous-exceptions only produces
241 code that is reasonably safe in such situations, but a correct
242 program cannot rely upon this working. It can be used in failsafe
243 code, where trying to continue on, and proceeding with potentially
244 incorrect results is better than halting the program.
245
246
247 Walking the stack:
248
249 The stack is walked by starting with a pointer to the current
250 frame, and finding the pointer to the callers frame. The unwind info
251 tells __throw how to find it.
252
253 Unwinding the stack:
254
255 When we use the term unwinding the stack, we mean undoing the
256 effects of the function prologue in a controlled fashion so that we
257 still have the flow of control. Otherwise, we could just return
258 (jump to the normal end of function epilogue).
259
260 This is done in __throw in libgcc2.c when we know that a handler exists
261 in a frame higher up the call stack than its immediate caller.
262
263 To unwind, we find the unwind data associated with the frame, if any.
264 If we don't find any, we call the library routine __terminate. If we do
265 find it, we use the information to copy the saved register values from
266 that frame into the register save area in the frame for __throw, return
267 into a stub which updates the stack pointer, and jump to the handler.
268 The normal function epilogue for __throw handles restoring the saved
269 values into registers.
270
271 When unwinding, we use this method if we know it will
272 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
273 an inline unwinder will have been emitted for any function that
274 __unwind_function cannot unwind. The inline unwinder appears as a
275 normal exception handler for the entire function, for any function
276 that we know cannot be unwound by __unwind_function. We inform the
277 compiler of whether a function can be unwound with
278 __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
279 when the unwinder isn't needed. __unwind_function is used as an
280 action of last resort. If no other method can be used for
281 unwinding, __unwind_function is used. If it cannot unwind, it
should call __terminate.
283
284 By default, if the target-specific backend doesn't supply a definition
285 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286 unwinders will be used instead. The main tradeoff here is in text space
287 utilization. Obviously, if inline unwinders have to be generated
288 repeatedly, this uses much more space than if a single routine is used.
289
290 However, it is simply not possible on some platforms to write a
291 generalized routine for doing stack unwinding without having some
292 form of additional data associated with each function. The current
293 implementation can encode this data in the form of additional
machine instructions or as static data in tabular form. The latter
295 is called the unwind data.
296
297 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299 defined and has a non-zero value, a per-function unwinder is not emitted
300 for the current function. If the static unwind data is supported, then
301 a per-function unwinder is not emitted.
302
303 On some platforms it is possible that neither __unwind_function
304 nor inlined unwinders are available. For these platforms it is not
305 possible to throw through a function call, and abort will be
306 invoked instead of performing the throw.
307
308 The reason the unwind data may be needed is that on some platforms
309 the order and types of data stored on the stack can vary depending
310 on the type of function, its arguments and returned values, and the
311 compilation options used (optimization versus non-optimization,
312 -fomit-frame-pointer, processor variations, etc).
313
314 Unfortunately, this also means that throwing through functions that
315 aren't compiled with exception handling support will still not be
316 possible on some platforms. This problem is currently being
317 investigated, but no solutions have been found that do not imply
318 some unacceptable performance penalties.
319
320 Future directions:
321
322 Currently __throw makes no differentiation between cleanups and
323 user-defined exception regions. While this makes the implementation
324 simple, it also implies that it is impossible to determine if a
325 user-defined exception handler exists for a given exception without
326 completely unwinding the stack in the process. This is undesirable
327 from the standpoint of debugging, as ideally it would be possible
328 to trap unhandled exceptions in the debugger before the process of
329 unwinding has even started.
330
331 This problem can be solved by marking user-defined handlers in a
332 special way (probably by adding additional bits to exception_table_list).
333 A two-pass scheme could then be used by __throw to iterate
334 through the table. The first pass would search for a relevant
335 user-defined handler for the current context of the throw, and if
336 one is found, the second pass would then invoke all needed cleanups
337 before jumping to the user-defined handler.
338
339 Many languages (including C++ and Ada) make execution of a
340 user-defined handler conditional on the "type" of the exception
341 thrown. (The type of the exception is actually the type of the data
342 that is thrown with the exception.) It will thus be necessary for
343 __throw to be able to determine if a given user-defined
344 exception handler will actually be executed, given the type of
345 exception.
346
347 One scheme is to add additional information to exception_table_list
348 as to the types of exceptions accepted by each handler. __throw
349 can do the type comparisons and then determine if the handler is
350 actually going to be executed.
351
352 There is currently no significant level of debugging support
353 available, other than to place a breakpoint on __throw. While
354 this is sufficient in most cases, it would be helpful to be able to
355 know where a given exception was going to be thrown to before it is
356 actually thrown, and to be able to choose between stopping before
357 every exception region (including cleanups), or just user-defined
358 exception regions. This should be possible to do in the two-pass
359 scheme by adding additional labels to __throw for appropriate
360 breakpoints, and additional debugger commands could be added to
361 query various state variables to determine what actions are to be
362 performed next.
363
364 Another major problem that is being worked on is the issue with stack
365 unwinding on various platforms. Currently the only platforms that have
366 support for the generation of a generic unwinder are the SPARC and MIPS.
367 All other ports require per-function unwinders, which produce large
368 amounts of code bloat.
369
370 For setjmp/longjmp based exception handling, some of the details
371 are as above, but there are some additional details. This section
372 discusses the details.
373
374 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
375 optimize EH regions yet. We don't have to worry about machine
376 specific issues with unwinding the stack, as we rely upon longjmp
377 for all the machine specific details. There is no variable context
378 of a throw, just the one implied by the dynamic handler stack
379 pointed to by the dynamic handler chain. There is no exception
table, and no calls to __register_exceptions. __sjthrow is used
381 instead of __throw, and it works by using the dynamic handler
382 chain, and longjmp. -fasynchronous-exceptions has no effect, as
383 the elimination of trivial exception regions is not yet performed.
384
385 A frontend can set protect_cleanup_actions_with_terminate when all
386 the cleanup actions should be protected with an EH region that
calls terminate when an unhandled exception is thrown. C++ does
388 this, Ada does not. */
389
390
391 #include "config.h"
392 #include "defaults.h"
393 #include <stdio.h>
394 #include "rtl.h"
395 #include "tree.h"
396 #include "flags.h"
397 #include "except.h"
398 #include "function.h"
399 #include "insn-flags.h"
400 #include "expr.h"
401 #include "insn-codes.h"
402 #include "regs.h"
403 #include "hard-reg-set.h"
404 #include "insn-config.h"
405 #include "recog.h"
406 #include "output.h"
407 #include "assert.h"
408
409 /* One to use setjmp/longjmp method of generating code for exception
410 handling. */
411
412 int exceptions_via_longjmp = 2;
413
414 /* One to enable asynchronous exception support. */
415
416 int asynchronous_exceptions = 0;
417
418 /* One to protect cleanup actions with a handler that calls
419 __terminate, zero otherwise. */
420
421 int protect_cleanup_actions_with_terminate = 0;
422
423 /* A list of labels used for exception handlers. Created by
424 find_exception_handler_labels for the optimization passes. */
425
426 rtx exception_handler_labels;
427
428 /* Nonzero means that __throw was invoked.
429
430 This is used by the C++ frontend to know if code needs to be emitted
431 for __throw or not. */
432
433 int throw_used;
434
435 /* The dynamic handler chain. Nonzero if the function has already
436 fetched a pointer to the dynamic handler chain for exception
437 handling. */
438
439 rtx current_function_dhc;
440
441 /* The dynamic cleanup chain. Nonzero if the function has already
442 fetched a pointer to the dynamic cleanup chain for exception
443 handling. */
444
445 rtx current_function_dcc;
446
/* A stack used for keeping track of the currently active exception
448 handling region. As each exception region is started, an entry
449 describing the region is pushed onto this stack. The current
450 region can be found by looking at the top of the stack, and as we
451 exit regions, the corresponding entries are popped.
452
453 Entries cannot overlap; they can be nested. So there is only one
454 entry at most that corresponds to the current instruction, and that
455 is the entry on the top of the stack. */
456
457 static struct eh_stack ehstack;
458
459 /* A queue used for tracking which exception regions have closed but
460 whose handlers have not yet been expanded. Regions are emitted in
461 groups in an attempt to improve paging performance.
462
463 As we exit a region, we enqueue a new entry. The entries are then
464 dequeued during expand_leftover_cleanups and expand_start_all_catch,
465
466 We should redo things so that we either take RTL for the handler,
467 or we expand the handler expressed as a tree immediately at region
468 end time. */
469
470 static struct eh_queue ehqueue;
471
472 /* Insns for all of the exception handlers for the current function.
473 They are currently emitted by the frontend code. */
474
475 rtx catch_clauses;
476
477 /* A TREE_CHAINed list of handlers for regions that are not yet
478 closed. The TREE_VALUE of each entry contains the handler for the
479 corresponding entry on the ehstack. */
480
481 static tree protect_list;
482
483 /* Stacks to keep track of various labels. */
484
485 /* Keeps track of the label to resume to should one want to resume
486 normal control flow out of a handler (instead of, say, returning to
487 the caller of the current function or exiting the program). Also
488 used as the context of a throw to rethrow an exception to the outer
489 exception region. */
490
491 struct label_node *caught_return_label_stack = NULL;
492
493 /* A random data area for the front end's own use. */
494
495 struct label_node *false_label_stack = NULL;
496
497 /* The rtx and the tree for the saved PC value. */
498
499 rtx eh_saved_pc_rtx;
500 tree eh_saved_pc;
501
502 rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
503 \f
504 /* Various support routines to manipulate the various data structures
505 used by the exception handling code. */
506
507 /* Push a label entry onto the given STACK. */
508
509 void
510 push_label_entry (stack, rlabel, tlabel)
511 struct label_node **stack;
512 rtx rlabel;
513 tree tlabel;
514 {
515 struct label_node *newnode
516 = (struct label_node *) xmalloc (sizeof (struct label_node));
517
518 if (rlabel)
519 newnode->u.rlabel = rlabel;
520 else
521 newnode->u.tlabel = tlabel;
522 newnode->chain = *stack;
523 *stack = newnode;
524 }
525
526 /* Pop a label entry from the given STACK. */
527
528 rtx
529 pop_label_entry (stack)
530 struct label_node **stack;
531 {
532 rtx label;
533 struct label_node *tempnode;
534
535 if (! *stack)
536 return NULL_RTX;
537
538 tempnode = *stack;
539 label = tempnode->u.rlabel;
540 *stack = (*stack)->chain;
541 free (tempnode);
542
543 return label;
544 }
545
546 /* Return the top element of the given STACK. */
547
548 tree
549 top_label_entry (stack)
550 struct label_node **stack;
551 {
552 if (! *stack)
553 return NULL_TREE;
554
555 return (*stack)->u.tlabel;
556 }
557
558 /* Make a copy of ENTRY using xmalloc to allocate the space. */
559
560 static struct eh_entry *
561 copy_eh_entry (entry)
562 struct eh_entry *entry;
563 {
564 struct eh_entry *newentry;
565
566 newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
567 bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
568
569 return newentry;
570 }
571
572 /* Push a new eh_node entry onto STACK. */
573
574 static void
575 push_eh_entry (stack)
576 struct eh_stack *stack;
577 {
578 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
579 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
580
581 entry->outer_context = gen_label_rtx ();
582 entry->exception_handler_label = gen_label_rtx ();
583 entry->finalization = NULL_TREE;
584
585 node->entry = entry;
586 node->chain = stack->top;
587 stack->top = node;
588 }
589
590 /* Pop an entry from the given STACK. */
591
592 static struct eh_entry *
593 pop_eh_entry (stack)
594 struct eh_stack *stack;
595 {
596 struct eh_node *tempnode;
597 struct eh_entry *tempentry;
598
599 tempnode = stack->top;
600 tempentry = tempnode->entry;
601 stack->top = stack->top->chain;
602 free (tempnode);
603
604 return tempentry;
605 }
606
607 /* Enqueue an ENTRY onto the given QUEUE. */
608
609 static void
610 enqueue_eh_entry (queue, entry)
611 struct eh_queue *queue;
612 struct eh_entry *entry;
613 {
614 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
615
616 node->entry = entry;
617 node->chain = NULL;
618
619 if (queue->head == NULL)
620 {
621 queue->head = node;
622 }
623 else
624 {
625 queue->tail->chain = node;
626 }
627 queue->tail = node;
628 }
629
630 /* Dequeue an entry from the given QUEUE. */
631
632 static struct eh_entry *
633 dequeue_eh_entry (queue)
634 struct eh_queue *queue;
635 {
636 struct eh_node *tempnode;
637 struct eh_entry *tempentry;
638
639 if (queue->head == NULL)
640 return NULL;
641
642 tempnode = queue->head;
643 queue->head = queue->head->chain;
644
645 tempentry = tempnode->entry;
646 free (tempnode);
647
648 return tempentry;
649 }
650 \f
/* Routine to see if exception handling is turned on.
652 DO_WARN is non-zero if we want to inform the user that exception
653 handling is turned off.
654
655 This is used to ensure that -fexceptions has been specified if the
656 compiler tries to use any exception-specific functions. */
657
658 int
659 doing_eh (do_warn)
660 int do_warn;
661 {
662 if (! flag_exceptions)
663 {
664 static int warned = 0;
665 if (! warned && do_warn)
666 {
667 error ("exception handling disabled, use -fexceptions to enable");
668 warned = 1;
669 }
670 return 0;
671 }
672 return 1;
673 }
674
675 /* Given a return address in ADDR, determine the address we should use
676 to find the corresponding EH region. */
677
rtx
eh_outer_context (addr)
     rtx addr;
{
  /* First mask out any unwanted bits.  MASK_RETURN_ADDR, when the
     target defines it, is the mask of bits in a saved return address
     that form the actual code address.  */
#ifdef MASK_RETURN_ADDR
  expand_and (addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  RETURN_ADDR_OFFSET,
     when defined, is the target's correction from the saved value to
     the address the EH tables are keyed on.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
694
695 /* Start a new exception region for a region of code that has a
696 cleanup action and push the HANDLER for the region onto
697 protect_list. All of the regions created with add_partial_entry
698 will be ended when end_protect_partials is invoked. */
699
void
add_partial_entry (handler)
     tree handler;
{
  /* Open a new EH region here; it stays open until end_protect_partials
     closes every region opened this way.  */
  expand_eh_region_start ();

  /* Make sure the entry is on the correct obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();

  /* Because this is a cleanup action, we may have to protect the handler
     with __terminate (see protect_cleanup_actions_with_terminate).  */
  handler = protect_with_terminate (handler);

  /* Record HANDLER for the region just opened; end_protect_partials
     consumes protect_list.  */
  protect_list = tree_cons (NULL_TREE, handler, protect_list);
  pop_obstacks ();
}
717
718 /* Get a reference to the dynamic handler chain. It points to the
719 pointer to the next element in the dynamic handler chain. It ends
720 when there are no more elements in the dynamic handler chain, when
721 the value is &top_elt from libgcc2.c. Immediately after the
722 pointer, is an area suitable for setjmp/longjmp when
723 DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
724 __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
725 isn't defined.
726
727 This routine is here to facilitate the porting of this code to
728 systems with threads. One can either replace the routine we emit a
729 call for here in libgcc2.c, or one can modify this routine to work
730 with their thread system. */
731
rtx
get_dynamic_handler_chain ()
{
#if 0
  /* Do this once we figure out how to get this to the front of the
     function, and we really only want one per real function, not one
     per inlined function.  */
  if (current_function_dhc == 0)
    {
      rtx dhc, insns;
      start_sequence ();

      dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
				     NULL_RTX, 1,
				     Pmode, 0);
      current_function_dhc = copy_to_reg (dhc);
      insns = get_insns ();
      end_sequence ();
      emit_insns_before (insns, get_first_nonparm_insn ());
    }
#else
  /* Live path: emit a fresh library call at the current insertion
     point.  NOTE(review): unlike the disabled code above, this does
     not test current_function_dhc first, so each call re-fetches the
     chain — presumably acceptable, but confirm against callers.  */
  rtx dhc;
  dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
				 NULL_RTX, 1,
				 Pmode, 0);
  current_function_dhc = copy_to_reg (dhc);
#endif

  /* We don't want a copy of the dhc, but rather, the single dhc.  */
  return gen_rtx (MEM, Pmode, current_function_dhc);
}
763
764 /* Get a reference to the dynamic cleanup chain. It points to the
765 pointer to the next element in the dynamic cleanup chain.
766 Immediately after the pointer, are two Pmode variables, one for a
767 pointer to a function that performs the cleanup action, and the
768 second, the argument to pass to that function. */
769
770 rtx
771 get_dynamic_cleanup_chain ()
772 {
773 rtx dhc, dcc;
774
775 dhc = get_dynamic_handler_chain ();
776 dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
777
778 current_function_dcc = copy_to_reg (dcc);
779
780 /* We don't want a copy of the dcc, but rather, the single dcc. */
781 return gen_rtx (MEM, Pmode, current_function_dcc);
782 }
783
784 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
785 LABEL is an rtx of code CODE_LABEL, in this function. */
786
787 void
788 jumpif_rtx (x, label)
789 rtx x;
790 rtx label;
791 {
792 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
793 }
794
795 /* Generate code to evaluate X and jump to LABEL if the value is zero.
796 LABEL is an rtx of code CODE_LABEL, in this function. */
797
798 void
799 jumpifnot_rtx (x, label)
800 rtx x;
801 rtx label;
802 {
803 jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
804 }
805
806 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
807 We just need to create an element for the cleanup list, and push it
808 into the chain.
809
810 A dynamic cleanup is a cleanup action implied by the presence of an
811 element on the EH runtime dynamic cleanup stack that is to be
812 performed when an exception is thrown. The cleanup action is
813 performed by __sjthrow when an exception is thrown. Only certain
814 actions can be optimized into dynamic cleanup actions. For the
815 restrictions on what actions can be performed using this routine,
816 see expand_eh_region_start_tree. */
817
818 static void
819 start_dynamic_cleanup (func, arg)
820 tree func;
821 tree arg;
822 {
823 rtx dhc, dcc;
824 rtx new_func, new_arg;
825 rtx x, buf;
826 int size;
827
828 /* We allocate enough room for a pointer to the function, and
829 one argument. */
830 size = 2;
831
832 /* XXX, FIXME: The stack space allocated this way is too long lived,
833 but there is no allocation routine that allocates at the level of
834 the last binding contour. */
835 buf = assign_stack_local (BLKmode,
836 GET_MODE_SIZE (Pmode)*(size+1),
837 0);
838
839 buf = change_address (buf, Pmode, NULL_RTX);
840
841 /* Store dcc into the first word of the newly allocated buffer. */
842
843 dcc = get_dynamic_cleanup_chain ();
844 emit_move_insn (buf, dcc);
845
846 /* Store func and arg into the cleanup list element. */
847
848 new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
849 GET_MODE_SIZE (Pmode)));
850 new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
851 GET_MODE_SIZE (Pmode)*2));
852 x = expand_expr (func, new_func, Pmode, 0);
853 if (x != new_func)
854 emit_move_insn (new_func, x);
855
856 x = expand_expr (arg, new_arg, Pmode, 0);
857 if (x != new_arg)
858 emit_move_insn (new_arg, x);
859
860 /* Update the cleanup chain. */
861
862 emit_move_insn (dcc, XEXP (buf, 0));
863 }
864
865 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
866 handler stack. This should only be used by expand_eh_region_start
867 or expand_eh_region_start_tree. */
868
static void
start_dynamic_handler ()
{
  rtx dhc, dcc;
  rtx x, arg, buf;
  int size;

#ifndef DONT_USE_BUILTIN_SETJMP
  /* The number of Pmode words for the setjmp buffer, when using the
     builtin setjmp/longjmp, see expand_builtin, case
     BUILT_IN_LONGJMP.  */
  size = 5;
#else
#ifdef JMP_BUF_SIZE
  size = JMP_BUF_SIZE;
#else
  /* Should be large enough for most systems, if it is not,
     JMP_BUF_SIZE should be defined with the proper value.  It will
     also tend to be larger than necessary for most systems, a more
     optimal port will define JMP_BUF_SIZE.  */
  size = FIRST_PSEUDO_REGISTER+2;
#endif
#endif
  /* XXX, FIXME: The stack space allocated this way is too long lived,
     but there is no allocation routine that allocates at the level of
     the last binding contour.  */
  /* Layout: [ previous-dhc, cleanup-chain, jmpbuf... ] -- hence
     size+1 extra Pmode words beyond the jmpbuf itself.  */
  arg = assign_stack_local (BLKmode,
			    GET_MODE_SIZE (Pmode)*(size+1),
			    0);

  /* View the BLKmode slot as a Pmode reference so word-sized moves
     can be emitted through it.  */
  arg = change_address (arg, Pmode, NULL_RTX);

  /* Store dhc into the first word of the newly allocated buffer.  */

  dhc = get_dynamic_handler_chain ();
  dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
					    GET_MODE_SIZE (Pmode)));
  emit_move_insn (arg, dhc);

  /* Zero out the start of the cleanup chain.  */
  emit_move_insn (dcc, const0_rtx);

  /* The jmpbuf starts two words into the area allocated.  */
  buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);

#ifdef DONT_USE_BUILTIN_SETJMP
  /* Use the library setjmp; X holds its (SImode) return value.  */
  x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
			       buf, Pmode);
#else
  x = expand_builtin_setjmp (buf, NULL_RTX);
#endif

  /* If we come back here for a catch, transfer control to the
     handler.  */

  jumpif_rtx (x, ehstack.top->entry->exception_handler_label);

  /* We are committed to this, so update the handler chain.  */

  emit_move_insn (dhc, XEXP (arg, 0));
}
930
931 /* Start an exception handling region for the given cleanup action.
932 All instructions emitted after this point are considered to be part
933 of the region until expand_eh_region_end is invoked. CLEANUP is
934 the cleanup action to perform. The return value is true if the
   exception region was optimized away.  In that case,
936 expand_eh_region_end does not need to be called for this cleanup,
937 nor should it be.
938
939 This routine notices one particular common case in C++ code
940 generation, and optimizes it so as to not need the exception
   region.  It works by creating a dynamic cleanup action instead of
   using an exception region.  */
943
944 int
945 expand_eh_region_start_tree (decl, cleanup)
946 tree decl;
947 tree cleanup;
948 {
949 rtx note;
950
951 /* This is the old code. */
952 if (! doing_eh (0))
953 return 0;
954
955 /* The optimization only applies to actions protected with
956 terminate, and only applies if we are using the setjmp/longjmp
957 codegen method. */
958 if (exceptions_via_longjmp
959 && protect_cleanup_actions_with_terminate)
960 {
961 tree func, arg;
962 tree args;
963
964 /* Ignore any UNSAVE_EXPR. */
965 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
966 cleanup = TREE_OPERAND (cleanup, 0);
967
968 /* Further, it only applies if the action is a call, if there
969 are 2 arguments, and if the second argument is 2. */
970
971 if (TREE_CODE (cleanup) == CALL_EXPR
972 && (args = TREE_OPERAND (cleanup, 1))
973 && (func = TREE_OPERAND (cleanup, 0))
974 && (arg = TREE_VALUE (args))
975 && (args = TREE_CHAIN (args))
976
977 /* is the second argument 2? */
978 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
979 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
980 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
981
982 /* Make sure there are no other arguments. */
983 && TREE_CHAIN (args) == NULL_TREE)
984 {
985 /* Arrange for returns and gotos to pop the entry we make on the
986 dynamic cleanup stack. */
987 expand_dcc_cleanup (decl);
988 start_dynamic_cleanup (func, arg);
989 return 1;
990 }
991 }
992
993 expand_eh_region_start_for_decl (decl);
994
995 return 0;
996 }
997
998 /* Just like expand_eh_region_start, except if a cleanup action is
999 entered on the cleanup chain, the TREE_PURPOSE of the element put
1000 on the chain is DECL. DECL should be the associated VAR_DECL, if
1001 any, otherwise it should be NULL_TREE. */
1002
void
expand_eh_region_start_for_decl (decl)
     tree decl;
{
  rtx note;

  /* This is the old code.  */
  if (! doing_eh (0))
    return;

  if (exceptions_via_longjmp)
    {
      /* We need a new block to record the start and end of the
	 dynamic handler chain.  We could always do this, but we
	 really want to permit jumping into such a block, and we want
	 to avoid any errors or performance impact in the SJ EH code
	 for now.  */
      expand_start_bindings (0);

      /* But we don't need or want a new temporary level.  */
      pop_temp_slots ();

      /* Mark this block as created by expand_eh_region_start.  This
	 is so that we can pop the block with expand_end_bindings
	 automatically.  */
      mark_block_as_eh_region ();

      /* Arrange for returns and gotos to pop the entry we make on the
	 dynamic handler stack.  */
      expand_dhc_cleanup (decl);
    }

  /* For the PC-range scheme, bracket the region with a BEG note whose
     block number is the handler's code-label number; NOTE is only set
     and used on this path.  */
  if (exceptions_via_longjmp == 0)
    note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
  push_eh_entry (&ehstack);
  if (exceptions_via_longjmp == 0)
    NOTE_BLOCK_NUMBER (note)
      = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
  if (exceptions_via_longjmp)
    start_dynamic_handler ();
}
1044
1045 /* Start an exception handling region. All instructions emitted after
1046 this point are considered to be part of the region until
1047 expand_eh_region_end is invoked. */
1048
void
expand_eh_region_start ()
{
  /* A plain region has no associated VAR_DECL, so delegate with
     NULL_TREE.  */
  expand_eh_region_start_for_decl (NULL_TREE);
}
1054
1055 /* End an exception handling region. The information about the region
1056 is found on the top of ehstack.
1057
1058 HANDLER is either the cleanup for the exception region, or if we're
1059 marking the end of a try block, HANDLER is integer_zero_node.
1060
1061 HANDLER will be transformed to rtl when expand_leftover_cleanups
1062 is invoked. */
1063
void
expand_eh_region_end (handler)
     tree handler;
{
  struct eh_entry *entry;

  if (! doing_eh (0))
    return;

  entry = pop_eh_entry (&ehstack);

  if (exceptions_via_longjmp == 0)
    {
      rtx label;
      /* Close the PC-range region with an END note carrying the same
	 handler-label number as the matching BEG note.  */
      rtx note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
      NOTE_BLOCK_NUMBER (note) = CODE_LABEL_NUMBER (entry->exception_handler_label);

      /* Normal fall-through control skips over the rethrow label.  */
      label = gen_label_rtx ();
      emit_jump (label);

      /* Emit a label marking the end of this exception region that
	 is used for rethrowing into the outer context.  */
      emit_label (entry->outer_context);

      /* Put in something that takes up space, as otherwise the end
	 address for this EH region could have the exact same address as
	 its outer region.  This would cause us to miss the fact that
	 resuming exception handling with this PC value would be inside
	 the outer region.  */
      emit_insn (gen_nop ());
      emit_barrier ();
      emit_label (label);
    }

  /* Remember the handler for expand_leftover_cleanups, which will
     turn it into RTL later.  */
  entry->finalization = handler;

  enqueue_eh_entry (&ehqueue, entry);

  /* If we have already started ending the bindings, don't recurse.
     This only happens when exceptions_via_longjmp is true.  */
  if (is_eh_region ())
    {
      /* Because we don't need or want a new temporary level and
	 because we didn't create one in expand_eh_region_start,
	 create a fake one now to avoid removing one in
	 expand_end_bindings.  */
      push_temp_slots ();

      mark_block_as_not_eh_region ();

      /* Maybe do this to prevent jumping in and so on...  */
      expand_end_bindings (NULL_TREE, 0, 0);
    }
}
1118
1119 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1120 call to __sjthrow.
1121
1122 Otherwise, we emit a call to __throw and note that we threw
1123 something, so we know we need to generate the necessary code for
1124 __throw.
1125
1126 Before invoking throw, the __eh_pc variable must have been set up
1127 to contain the PC being thrown from. This address is used by
1128 __throw to determine which exception region (if any) is
1129 responsible for handling the exception. */
1130
void
emit_throw ()
{
  if (exceptions_via_longjmp)
    {
      /* setjmp/longjmp scheme: the runtime walks the dynamic chains.  */
      emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
    }
  else
    {
#ifdef JUMP_TO_THROW
      /* Target prefers a direct jump into __throw.  */
      emit_indirect_jump (throw_libfunc);
#else
#ifndef DWARF2_UNWIND_INFO
      /* Prevent assemble_external from doing anything with this symbol.  */
      SYMBOL_REF_USED (throw_libfunc) = 1;
#endif
      emit_library_call (throw_libfunc, 0, VOIDmode, 0);
#endif
      /* Record that __throw support code must be generated.  */
      throw_used = 1;
    }
  /* Control never returns here.  */
  emit_barrier ();
}
1153
1154 /* An internal throw with an indirect CONTEXT we want to throw from.
1155 CONTEXT evaluates to the context of the throw. */
1156
static void
expand_internal_throw_indirect (context)
     rtx context;
{
  /* Make the __eh_pc symbol visible to the assembler, then record
     CONTEXT as the PC being thrown from before dispatching.  */
  assemble_external (eh_saved_pc);
  emit_move_insn (eh_saved_pc_rtx, context);
  emit_throw ();
}
1165
1166 /* An internal throw with a direct CONTEXT we want to throw from.
1167 CONTEXT must be a label; its address will be used as the context of
1168 the throw. */
1169
1170 void
1171 expand_internal_throw (context)
1172 rtx context;
1173 {
1174 expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context));
1175 }
1176
1177 /* Called from expand_exception_blocks and expand_end_catch_block to
1178 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1179
void
expand_leftover_cleanups ()
{
  struct eh_entry *entry;

  /* Drain the queue of pending regions, emitting a handler body for
     each one.  */
  while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
    {
      rtx prev;

      /* A leftover try block.  Shouldn't be one here.  */
      if (entry->finalization == integer_zero_node)
	abort ();

      /* Output the label for the start of the exception handler.  */
      emit_label (entry->exception_handler_label);

#ifdef HAVE_exception_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_exception_receiver)
	  emit_insn (gen_exception_receiver ());
#endif

#ifdef HAVE_nonlocal_goto_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
#endif

      /* And now generate the insns for the handler.  */
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* If the handler can fall off its end (no barrier was emitted),
	 propagate the exception onward.  */
      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	{
	  if (exceptions_via_longjmp)
	    emit_throw ();
	  else
	    {
	      /* The below can be optimized away, and we could just
		 fall into the next EH handler, if we are certain they
		 are nested.  */
	      /* Emit code to throw to the outer context if we fall off
		 the end of the handler.  */
	      expand_internal_throw (entry->outer_context);
	    }
	}

      free (entry);
    }
}
1230
1231 /* Called at the start of a block of try statements. */
void
expand_start_try_stmts ()
{
  /* Open the EH region for the try block, provided EH is enabled.  */
  if (doing_eh (1))
    expand_eh_region_start ();
}
1240
1241 /* Generate RTL for the start of a group of catch clauses.
1242
1243 It is responsible for starting a new instruction sequence for the
1244 instructions in the catch block, and expanding the handlers for the
1245 internally-generated exception regions nested within the try block
1246 corresponding to this catch block. */
1247
void
expand_start_all_catch ()
{
  struct eh_entry *entry;
  tree label;

  if (! doing_eh (1))
    return;

  /* End the try block.  */
  expand_eh_region_end (integer_zero_node);

  emit_line_note (input_filename, lineno);
  label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

  /* The label for the exception handling block that we will save.
     This is Lresume in the documentation.  */
  expand_label (label);

  if (exceptions_via_longjmp == 0)
    {
      /* Put in something that takes up space, as otherwise the end
	 address for the EH region could have the exact same address as
	 the outer region, causing us to miss the fact that resuming
	 exception handling with this PC value would be inside the outer
	 region.  */
      emit_insn (gen_nop ());
    }

  /* Push the label that points to where normal flow is resumed onto
     the top of the label stack.  */
  push_label_entry (&caught_return_label_stack, NULL_RTX, label);

  /* Start a new sequence for all the catch blocks.  We will add this
     to the global sequence catch_clauses when we have completed all
     the handlers in this handler-seq.  */
  start_sequence ();

  /* Emit the handlers queued by the try block's nested regions until
     we reach the try block's own entry.  */
  while (1)
    {
      rtx prev;

      entry = dequeue_eh_entry (&ehqueue);
      /* Emit the label for the exception handler for this region, and
	 expand the code for the handler.

	 Note that a catch region is handled as a side-effect here;
	 for a try block, entry->finalization will contain
	 integer_zero_node, so no code will be generated in the
	 expand_expr call below.  But, the label for the handler will
	 still be emitted, so any code emitted after this point will
	 end up being the handler.  */
      emit_label (entry->exception_handler_label);

#ifdef HAVE_exception_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_exception_receiver)
	  emit_insn (gen_exception_receiver ());
#endif

#ifdef HAVE_nonlocal_goto_receiver
      if (! exceptions_via_longjmp)
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
#endif

      /* When we get down to the matching entry for this try block, stop.  */
      if (entry->finalization == integer_zero_node)
	{
	  /* Don't forget to free this entry.  */
	  free (entry);
	  break;
	}

      /* And now generate the insns for the handler.  */
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* If the handler can fall off its end, rethrow onward.  */
      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	{
	  if (exceptions_via_longjmp)
	    emit_throw ();
	  else
	    {
	      /* Code to throw out to outer context when we fall off end
		 of the handler.  We can't do this here for catch blocks,
		 so it's done in expand_end_all_catch instead.

		 The below can be optimized away (and we could just fall
		 into the next EH handler) if we are certain they are
		 nested.  */

	      expand_internal_throw (entry->outer_context);
	    }
	}
      free (entry);
    }
}
1346
1347 /* Finish up the catch block. At this point all the insns for the
1348 catch clauses have already been generated, so we only have to add
1349 them to the catch_clauses list. We also want to make sure that if
1350 we fall off the end of the catch clauses that we rethrow to the
1351 outer EH region. */
1352
void
expand_end_all_catch ()
{
  rtx new_catch_clause;

  if (! doing_eh (1))
    return;

  if (exceptions_via_longjmp)
    emit_throw ();
  else
    {
      /* Code to throw out to outer context, if we fall off end of catch
	 handlers.  This is rethrow (Lresume, same id, same obj) in the
	 documentation.  We use Lresume because we know that it will throw
	 to the correct context.

	 In other words, if the catch handler doesn't exit or return, we
	 do a "throw" (using the address of Lresume as the point being
	 thrown from) so that the outer EH region can then try to process
	 the exception.  */

      expand_internal_throw (DECL_RTL (top_label_entry (&caught_return_label_stack)));
    }

  /* Now we have the complete catch sequence.  */
  new_catch_clause = get_insns ();
  end_sequence ();

  /* This level of catch blocks is done, so set up the successful
     catch jump label for the next layer of catch blocks.  */
  pop_label_entry (&caught_return_label_stack);

  /* Add the new sequence of catches to the main one for this function.  */
  push_to_sequence (catch_clauses);
  emit_insns (new_catch_clause);
  catch_clauses = get_insns ();
  end_sequence ();

  /* Here we fall through into the continuation code.  */
}
1394
1395 /* End all the pending exception regions on protect_list. The handlers
1396 will be emitted when expand_leftover_cleanups is invoked. */
1397
1398 void
1399 end_protect_partials ()
1400 {
1401 while (protect_list)
1402 {
1403 expand_eh_region_end (TREE_VALUE (protect_list));
1404 protect_list = TREE_CHAIN (protect_list);
1405 }
1406 }
1407
1408 /* Arrange for __terminate to be called if there is an unhandled throw
1409 from within E. */
1410
tree
protect_with_terminate (e)
     tree e;
{
  /* We only need to do this when using setjmp/longjmp EH and the
     language requires it, as otherwise we protect all of the handlers
     at once, if we need to.  */
  if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
    {
      tree handler, result;

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();

      /* Build a void RTL_EXPR whose insn sequence simply calls
	 __terminate.  */
      handler = make_node (RTL_EXPR);
      TREE_TYPE (handler) = void_type_node;
      RTL_EXPR_RTL (handler) = const0_rtx;
      TREE_SIDE_EFFECTS (handler) = 1;
      start_sequence_for_rtl_expr (handler);

      emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
      /* __terminate does not return.  */
      emit_barrier ();

      RTL_EXPR_SEQUENCE (handler) = get_insns ();
      end_sequence ();

      /* Wrap E so that an exception escaping it runs HANDLER,
	 preserving E's expression flags on the wrapper.  */
      result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
      TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
      TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
      TREE_READONLY (result) = TREE_READONLY (e);

      pop_obstacks ();

      e = result;
    }

  return e;
}
1450 \f
1451 /* The exception table that we build that is used for looking up and
1452 dispatching exceptions, the current number of entries, and its
1453 maximum size before we have to extend it.
1454
1455 The number in eh_table is the code label number of the exception
1456 handler for the region. This is added by add_eh_table_entry and
1457 used by output_exception_table_entry. */
1458
/* Dynamically-grown array of handler code-label numbers.  */
static int *eh_table;
/* Number of entries currently in use in eh_table.  */
static int eh_table_size;
/* Allocated capacity of eh_table, in entries.  */
static int eh_table_max_size;
1462
1463 /* Note the need for an exception table entry for region N. If we
1464 don't need to output an explicit exception table, avoid all of the
1465 extra work.
1466
1467 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1468 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1469 label number of the exception handler for the region. */
1470
1471 void
1472 add_eh_table_entry (n)
1473 int n;
1474 {
1475 #ifndef OMIT_EH_TABLE
1476 if (eh_table_size >= eh_table_max_size)
1477 {
1478 if (eh_table)
1479 {
1480 eh_table_max_size += eh_table_max_size>>1;
1481
1482 if (eh_table_max_size < 0)
1483 abort ();
1484
1485 eh_table = (int *) xrealloc (eh_table,
1486 eh_table_max_size * sizeof (int));
1487 }
1488 else
1489 {
1490 eh_table_max_size = 252;
1491 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1492 }
1493 }
1494 eh_table[eh_table_size++] = n;
1495 #endif
1496 }
1497
1498 /* Return a non-zero value if we need to output an exception table.
1499
1500 On some platforms, we don't have to output a table explicitly.
1501 This routine doesn't mean we don't have one. */
1502
1503 int
1504 exception_table_p ()
1505 {
1506 if (eh_table)
1507 return 1;
1508
1509 return 0;
1510 }
1511
1512 /* 1 if we need a static constructor to register EH table info. */
1513
int
register_exception_table_p ()
{
#if defined (DWARF2_UNWIND_INFO)
  /* DWARF2 unwind info needs no registration constructor.  */
  return 0;
#else
  return exception_table_p ();
#endif
}
1523
/* Output the entry of the exception table corresponding to the
   exception region numbered N to file FILE.
1526
1527 N is the code label number corresponding to the handler of the
1528 region. */
1529
static void
output_exception_table_entry (file, n)
     FILE *file;
     int n;
{
  char buf[256];
  rtx sym;

  /* Each entry is three pointers: region begin label, region end
     label, and the handler's code label -- presumably the LEHB/LEHE
     labels are emitted around the region by the final pass; TODO
     confirm against final_scan_insn.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);

  ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);

  ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);

  putc ('\n', file);		/* blank line */
}
1552
1553 /* Output the exception table if we have and need one. */
1554
void
output_exception_table ()
{
  int i;
  extern FILE *asm_out_file;

  if (! doing_eh (0) || ! eh_table)
    return;

  exception_section ();

  /* Beginning marker for table.  */
  assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
  assemble_label ("__EXCEPTION_TABLE__");

  for (i = 0; i < eh_table_size; ++i)
    output_exception_table_entry (asm_out_file, eh_table[i]);

  /* NOTE(review): eh_table is freed but neither the pointer nor
     eh_table_size is cleared, so a later exception_table_p call would
     see a stale pointer -- confirm no caller queries the table after
     this point.  */
  free (eh_table);

  /* Ending marker for table: three -1 pointers terminate it.  */
  assemble_label ("__EXCEPTION_END__");
  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
  putc ('\n', asm_out_file);		/* blank line */
}
1582
1583 /* Generate code to initialize the exception table at program startup
1584 time. */
1585
1586 void
1587 register_exception_table ()
1588 {
1589 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1590 VOIDmode, 1,
1591 gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1592 Pmode);
1593 }
1594 \f
1595 /* Emit the RTL for the start of the per-function unwinder for the
1596 current function. See emit_unwinder for further information.
1597
1598 DOESNT_NEED_UNWINDER is a target-specific macro that determines if
1599 the current function actually needs a per-function unwinder or not.
1600 By default, all functions need one. */
1601
void
start_eh_unwinder ()
{
#ifdef DOESNT_NEED_UNWINDER
  if (DOESNT_NEED_UNWINDER)
    return;
#endif

  /* If we are using the setjmp/longjmp implementation, we don't need a
     per function unwinder.  */

  if (exceptions_via_longjmp)
    return;

#ifdef DWARF2_UNWIND_INFO
  /* DWARF2 frame info lets the runtime unwind without per-function
     code.  */
  return;
#endif

  /* Open the EH region that brackets the whole function body; the
     matching end is emitted by end_eh_unwinder.  */
  expand_eh_region_start ();
}
1622
1623 /* Emit insns for the end of the per-function unwinder for the
1624 current function. */
1625
void
end_eh_unwinder ()
{
  tree expr;
  rtx return_val_rtx, ret_val, label, end, insns;

  if (! doing_eh (0))
    return;

#ifdef DOESNT_NEED_UNWINDER
  if (DOESNT_NEED_UNWINDER)
    return;
#endif

  /* If we are using the setjmp/longjmp implementation, we don't need a
     per function unwinder.  */

  if (exceptions_via_longjmp)
    return;

#ifdef DWARF2_UNWIND_INFO
  return;
#else /* DWARF2_UNWIND_INFO */

  assemble_external (eh_saved_pc);

  /* Build the unwinder body as an RTL_EXPR so it can be installed as
     the handler of the function-wide region opened by
     start_eh_unwinder.  */
  expr = make_node (RTL_EXPR);
  TREE_TYPE (expr) = void_type_node;
  RTL_EXPR_RTL (expr) = const0_rtx;
  TREE_SIDE_EFFECTS (expr) = 1;
  start_sequence_for_rtl_expr (expr);

  /* ret_val will contain the address of the code where the call
     to the current function occurred.  */
  ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
					0, hard_frame_pointer_rtx);
  return_val_rtx = copy_to_reg (ret_val);

  /* Get the address we need to use to determine what exception
     handler should be invoked, and store it in __eh_pc.  The -1
     biases the PC into the caller's region.  */
  return_val_rtx = eh_outer_context (return_val_rtx);
  return_val_rtx = expand_binop (Pmode, sub_optab, return_val_rtx, GEN_INT (1),
				 NULL_RTX, 0, OPTAB_LIB_WIDEN);
  emit_move_insn (eh_saved_pc_rtx, return_val_rtx);

  /* Either set things up so we do a return directly to __throw, or
     we return here instead.  */
#ifdef JUMP_TO_THROW
  emit_move_insn (ret_val, throw_libfunc);
#else
  label = gen_label_rtx ();
  emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
#endif

#ifdef RETURN_ADDR_OFFSET
  /* Compensate for any target-specific bias in the stored return
     address.  */
  return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
  if (return_val_rtx != ret_val)
    emit_move_insn (ret_val, return_val_rtx);
#endif

  end = gen_label_rtx ();
  emit_jump (end);

  RTL_EXPR_SEQUENCE (expr) = get_insns ();
  end_sequence ();

  /* Close the function-wide region with the unwinder as its handler.  */
  expand_eh_region_end (expr);

  emit_jump (end);

#ifndef JUMP_TO_THROW
  emit_label (label);
  emit_throw ();
#endif

  /* Emit any handlers still pending on the queue.  */
  expand_leftover_cleanups ();

  emit_label (end);

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif
#endif /* DWARF2_UNWIND_INFO */
}
1714
1715 /* If necessary, emit insns for the per function unwinder for the
1716 current function. Called after all the code that needs unwind
1717 protection is output.
1718
1719 The unwinder takes care of catching any exceptions that have not
1720 been previously caught within the function, unwinding the stack to
1721 the next frame, and rethrowing using the address of the current
1722 function's caller as the context of the throw.
1723
1724 On some platforms __throw can do this by itself (or with the help
1725 of __unwind_function) so the per-function unwinder is
1726 unnecessary.
1727
1728 We cannot place the unwinder into the function until after we know
1729 we are done inlining, as we don't want to have more than one
1730 unwinder per non-inlined function. */
1731
void
emit_unwinder ()
{
  rtx insns, insn;

  /* Build the region-start insns in a detached sequence.  */
  start_sequence ();
  start_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* We place the start of the exception region associated with the
     per function unwinder at the top of the function.  */
  if (insns)
    emit_insns_after (insns, get_insns ());

  /* Build the region-end/unwinder insns the same way.  */
  start_sequence ();
  end_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* And we place the end of the exception region before the USE and
     CLOBBER insns that may come at the end of the function.  */
  if (insns == 0)
    return;

  /* Back up over trailing notes and USE/CLOBBER insns.  */
  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* If control already cannot fall through here (a barrier precedes
     the label), insert just before; otherwise emit a jump around the
     unwinder so normal flow skips it.  */
  if (GET_CODE (insn) == CODE_LABEL
      && GET_CODE (PREV_INSN (insn)) == BARRIER)
    {
      insn = PREV_INSN (insn);
    }
  else
    {
      rtx label = gen_label_rtx ();
      emit_label_after (label, insn);
      insn = emit_jump_insn_after (gen_jump (label), insn);
      insn = emit_barrier_after (insn);
    }

  emit_insns_after (insns, insn);
}
1779
1780 /* Scan the current insns and build a list of handler labels. The
1781 resulting list is placed in the global variable exception_handler_labels.
1782
1783 It is called after the last exception handling region is added to
1784 the current function (when the rtl is almost all built for the
1785 current function) and before the jump optimization pass. */
1786
void
find_exception_handler_labels ()
{
  rtx insn;
  int max_labelno = max_label_num ();
  int min_labelno = get_first_label_num ();
  rtx *labels;

  exception_handler_labels = NULL_RTX;

  /* If we aren't doing exception handling, there isn't much to check.  */
  if (! doing_eh (0))
    return;

  /* Generate a handy reference to each label.  */

  /* We call xmalloc here instead of alloca; we did the latter in the past,
     but found that it can sometimes end up being asked to allocate space
     for more than 1 million labels.  */
  labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
  bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));

  /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER.  */
  labels -= min_labelno;

  /* First pass: map each code-label number to its insn.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	if (CODE_LABEL_NUMBER (insn) >= min_labelno
	    && CODE_LABEL_NUMBER (insn) < max_labelno)
	  labels[CODE_LABEL_NUMBER (insn)] = insn;
    }

  /* For each start of a region, add its label to the list.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	{
	  rtx label = NULL_RTX;

	  /* The BEG note's block number is the handler's code-label
	     number (see expand_eh_region_start_for_decl).  */
	  if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
	      && NOTE_BLOCK_NUMBER (insn) < max_labelno)
	    {
	      label = labels[NOTE_BLOCK_NUMBER (insn)];

	      if (label)
		exception_handler_labels
		  = gen_rtx (EXPR_LIST, VOIDmode,
			     label, exception_handler_labels);
	      else
		warning ("didn't find handler for EH region %d",
			 NOTE_BLOCK_NUMBER (insn));
	    }
	  else
	    warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
	}
    }

  /* Undo the earlier bias before freeing.  */
  free (labels + min_labelno);
}
1849
1850 /* Perform sanity checking on the exception_handler_labels list.
1851
1852 Can be called after find_exception_handler_labels is called to
1853 build the list of exception handlers for the current function and
1854 before we finish processing the current function. */
1855
1856 void
1857 check_exception_handler_labels ()
1858 {
1859 rtx insn, handler;
1860
1861 /* If we aren't doing exception handling, there isn't much to check. */
1862 if (! doing_eh (0))
1863 return;
1864
1865 /* Ensure that the CODE_LABEL_NUMBER for the CODE_LABEL entry point
1866 in each handler corresponds to the CODE_LABEL_NUMBER of the
1867 handler. */
1868
1869 for (handler = exception_handler_labels;
1870 handler;
1871 handler = XEXP (handler, 1))
1872 {
1873 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1874 {
1875 if (GET_CODE (insn) == CODE_LABEL)
1876 {
1877 if (CODE_LABEL_NUMBER (insn)
1878 == CODE_LABEL_NUMBER (XEXP (handler, 0)))
1879 {
1880 if (insn != XEXP (handler, 0))
1881 warning ("mismatched handler %d",
1882 CODE_LABEL_NUMBER (insn));
1883 break;
1884 }
1885 }
1886 }
1887 if (insn == NULL_RTX)
1888 warning ("handler not found %d",
1889 CODE_LABEL_NUMBER (XEXP (handler, 0)));
1890 }
1891
1892 /* Now go through and make sure that for each region there is a
1893 corresponding label. */
1894 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1895 {
1896 if (GET_CODE (insn) == NOTE
1897 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1898 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1899 {
1900 for (handler = exception_handler_labels;
1901 handler;
1902 handler = XEXP (handler, 1))
1903 {
1904 if (CODE_LABEL_NUMBER (XEXP (handler, 0))
1905 == NOTE_BLOCK_NUMBER (insn))
1906 break;
1907 }
1908 if (handler == NULL_RTX)
1909 warning ("region exists, no handler %d",
1910 NOTE_BLOCK_NUMBER (insn));
1911 }
1912 }
1913 }
1914 \f
1915 /* This group of functions initializes the exception handling data
1916 structures at the start of the compilation, initializes the data
1917 structures at the start of a function, and saves and restores the
1918 exception handling data structures for the start/end of a nested
1919 function. */
1920
1921 /* Toplevel initialization for EH things. */
1922
1923 void
1924 init_eh ()
1925 {
1926 /* Generate rtl to reference the variable in which the PC of the
1927 current context is saved. */
1928 tree type = build_pointer_type (make_node (VOID_TYPE));
1929
1930 eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
1931 DECL_EXTERNAL (eh_saved_pc) = 1;
1932 TREE_PUBLIC (eh_saved_pc) = 1;
1933 make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
1934 eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
1935 }
1936
1937 /* Initialize the per-function EH information. */
1938
1939 void
1940 init_eh_for_function ()
1941 {
1942 ehstack.top = 0;
1943 ehqueue.head = ehqueue.tail = 0;
1944 catch_clauses = NULL_RTX;
1945 false_label_stack = 0;
1946 caught_return_label_stack = 0;
1947 protect_list = NULL_TREE;
1948 current_function_dhc = NULL_RTX;
1949 current_function_dcc = NULL_RTX;
1950 }
1951
1952 /* Save some of the per-function EH info into the save area denoted by
1953 P.
1954
1955 This is currently called from save_stmt_status. */
1956
1957 void
1958 save_eh_status (p)
1959 struct function *p;
1960 {
1961 assert (p != NULL);
1962
1963 p->ehstack = ehstack;
1964 p->ehqueue = ehqueue;
1965 p->catch_clauses = catch_clauses;
1966 p->false_label_stack = false_label_stack;
1967 p->caught_return_label_stack = caught_return_label_stack;
1968 p->protect_list = protect_list;
1969 p->dhc = current_function_dhc;
1970 p->dcc = current_function_dcc;
1971
1972 init_eh ();
1973 }
1974
1975 /* Restore the per-function EH info saved into the area denoted by P.
1976
1977 This is currently called from restore_stmt_status. */
1978
1979 void
1980 restore_eh_status (p)
1981 struct function *p;
1982 {
1983 assert (p != NULL);
1984
1985 protect_list = p->protect_list;
1986 caught_return_label_stack = p->caught_return_label_stack;
1987 false_label_stack = p->false_label_stack;
1988 catch_clauses = p->catch_clauses;
1989 ehqueue = p->ehqueue;
1990 ehstack = p->ehstack;
1991 current_function_dhc = p->dhc;
1992 current_function_dcc = p->dcc;
1993 }
1994 \f
1995 /* This section is for the exception handling specific optimization
1996 pass. First are the internal routines, and then the main
1997 optimization pass. */
1998
1999 /* Determine if the given INSN can throw an exception. */
2000
2001 static int
2002 can_throw (insn)
2003 rtx insn;
2004 {
2005 /* Calls can always potentially throw exceptions. */
2006 if (GET_CODE (insn) == CALL_INSN)
2007 return 1;
2008
2009 if (asynchronous_exceptions)
2010 {
2011 /* If we wanted asynchronous exceptions, then everything but NOTEs
2012 and CODE_LABELs could throw. */
2013 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2014 return 1;
2015 }
2016
2017 return 0;
2018 }
2019
2020 /* Scan a exception region looking for the matching end and then
2021 remove it if possible. INSN is the start of the region, N is the
2022 region number, and DELETE_OUTER is to note if anything in this
2023 region can throw.
2024
2025 Regions are removed if they cannot possibly catch an exception.
2026 This is determined by invoking can_throw on each insn within the
2027 region; if can_throw returns true for any of the instructions, the
2028 region can catch an exception, since there is an insn within the
2029 region that is capable of throwing an exception.
2030
2031 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2032 calls abort if it can't find one.
2033
2034 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEGIN, or if N doesn't
2035 correspond to the region number, or if DELETE_OUTER is NULL. */
2036
static rtx
scan_region (insn, n, delete_outer)
     rtx insn;
     int n;
     int *delete_outer;
{
  /* Remember the _BEG note so we can delete it if the region is empty.  */
  rtx start = insn;

  /* Assume we can delete the region; any insn that can throw clears
     this, and the flag is also propagated up from nested regions via
     the recursive call below.  */
  int delete = 1;

  /* Callers must hand us the _BEG note of region N itself.  */
  assert (insn != NULL_RTX
	  && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
	  && NOTE_BLOCK_NUMBER (insn) == n
	  && delete_outer != NULL);

  insn = NEXT_INSN (insn);

  /* Look for the matching end.  */
  while (! (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
    {
      /* If anything can throw, we can't remove the region.  */
      if (delete && can_throw (insn))
	{
	  delete = 0;
	}

      /* Watch out for and handle nested regions; the recursive call
	 returns the nested _END note, so advancing past it skips the
	 whole nested region.  A throwing insn inside the nested
	 region clears our DELETE through the &delete argument.  */
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	{
	  insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
	}

      insn = NEXT_INSN (insn);
    }

  /* The _BEG/_END NOTEs must match and nest.  */
  if (NOTE_BLOCK_NUMBER (insn) != n)
    abort ();

  /* If anything in this exception region can throw, we can throw.  */
  if (! delete)
    *delete_outer = 0;
  else
    {
      /* Delete the start and end of the region.  */
      delete_insn (start);
      delete_insn (insn);

      /* Only do this part if we have built the exception handler
         labels.  */
      if (exception_handler_labels)
	{
	  rtx x, *prev = &exception_handler_labels;

	  /* Find it in the list of handlers; the handler label's
	     CODE_LABEL_NUMBER equals the region number N.  */
	  for (x = exception_handler_labels; x; x = XEXP (x, 1))
	    {
	      rtx label = XEXP (x, 0);
	      if (CODE_LABEL_NUMBER (label) == n)
		{
		  /* If we are the last reference to the handler,
                     delete it.  */
		  if (--LABEL_NUSES (label) == 0)
		    delete_insn (label);

		  if (optimize)
		    {
		      /* Remove it from the list of exception handler
			 labels, if we are optimizing.  If we are not, then
			 leave it in the list, as we are not really going to
			 remove the region.  PREV points at the link field
			 that referenced X, so unlink in place and clear
			 X's fields.  */
		      *prev = XEXP (x, 1);
		      XEXP (x, 1) = 0;
		      XEXP (x, 0) = 0;
		    }

		  break;
		}
	      prev = &XEXP (x, 1);
	    }
	}
    }
  /* Return the _END note so the caller can skip the whole region.  */
  return insn;
}
2125
2126 /* Perform various interesting optimizations for exception handling
2127 code.
2128
2129 We look for empty exception regions and make them go (away). The
2130 jump optimization code will remove the handler if nothing else uses
2131 it. */
2132
2133 void
2134 exception_optimize ()
2135 {
2136 rtx insn, regions = NULL_RTX;
2137 int n;
2138
2139 /* The below doesn't apply to setjmp/longjmp EH. */
2140 if (exceptions_via_longjmp)
2141 return;
2142
2143 /* Remove empty regions. */
2144 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2145 {
2146 if (GET_CODE (insn) == NOTE
2147 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2148 {
2149 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2150 insn, we will indirectly skip through all the insns
2151 inbetween. We are also guaranteed that the value of insn
2152 returned will be valid, as otherwise scan_region won't
2153 return. */
2154 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2155 }
2156 }
2157 }
2158 \f
2159 /* Various hooks for the DWARF 2 __throw routine. */
2160
2161 /* Do any necessary initialization to access arbitrary stack frames.
2162 On the SPARC, this means flushing the register windows. */
2163
2164 void
2165 expand_builtin_unwind_init ()
2166 {
2167 /* Set this so all the registers get saved in our frame; we need to be
2168 able to copy the saved values for any registers from frames we unwind. */
2169 current_function_has_nonlocal_label = 1;
2170
2171 #ifdef SETUP_FRAME_ADDRESSES
2172 SETUP_FRAME_ADDRESSES ();
2173 #endif
2174 }
2175
2176 /* Given a value extracted from the return address register or stack slot,
2177 return the actual address encoded in that value. */
2178
2179 rtx
2180 expand_builtin_extract_return_addr (addr_tree)
2181 tree addr_tree;
2182 {
2183 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2184 return eh_outer_context (addr);
2185 }
2186
2187 /* Given an actual address in addr_tree, do any necessary encoding
2188 and return the value to be stored in the return address register or
2189 stack slot so the epilogue will return to that address. */
2190
2191 rtx
2192 expand_builtin_frob_return_addr (addr_tree)
2193 tree addr_tree;
2194 {
2195 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2196 #ifdef RETURN_ADDR_OFFSET
2197 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2198 #endif
2199 return addr;
2200 }
2201
2202 /* Given an actual address in addr_tree, set the return address register up
2203 so the epilogue will return to that address. If the return address is
2204 not in a register, do nothing. */
2205
2206 void
2207 expand_builtin_set_return_addr_reg (addr_tree)
2208 tree addr_tree;
2209 {
2210 rtx tmp;
2211 rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2212 0, hard_frame_pointer_rtx);
2213
2214 if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2215 return;
2216
2217 tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2218 if (tmp != ra)
2219 emit_move_insn (ra, tmp);
2220 }
2221
2222 /* Choose two registers for communication between the main body of
2223 __throw and the stub for adjusting the stack pointer. The first register
2224 is used to pass the address of the exception handler; the second register
2225 is used to pass the stack pointer offset.
2226
2227 For register 1 we use the return value register for a void *.
2228 For register 2 we use the static chain register if it exists and is
2229 different from register 1, otherwise some arbitrary call-clobbered
2230 register. */
2231
static void
eh_regs (r1, r2, outgoing)
     rtx *r1, *r2;
     int outgoing;
{
  rtx reg1, reg2;

  /* Register 1: the return-value register for a void *.  When the
     target distinguishes outgoing from incoming value registers,
     OUTGOING selects which view we want.  */
#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
				    current_function_decl);
  else
#endif
    reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
			   current_function_decl);

  /* Register 2: prefer the static chain register.  Note the
     preprocessor trickery here: when STATIC_CHAIN_REGNUM is not
     defined, the assignment below the #endif runs unconditionally;
     when it is defined, that same assignment is the body of the
     REGNO comparison, clearing REG2 if it collides with REG1.  */
#ifdef STATIC_CHAIN_REGNUM
  if (outgoing)
    reg2 = static_chain_incoming_rtx;
  else
    reg2 = static_chain_rtx;
  if (REGNO (reg2) == REGNO (reg1))
#endif /* STATIC_CHAIN_REGNUM */
    reg2 = NULL_RTX;

  if (reg2 == NULL_RTX)
    {
      /* No usable static chain register: fall back to the first
	 call-clobbered, non-fixed hard register distinct from REG1.  */
      int i;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
	if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
	  {
	    reg2 = gen_rtx (REG, Pmode, i);
	    break;
	  }

      /* Every target should have at least one such register.  */
      if (reg2 == NULL_RTX)
	abort ();
    }

  *r1 = reg1;
  *r2 = reg2;
}
2274
2275 /* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2276 to the exception handler. __throw will set up the necessary values
2277 and then return to the stub. */
2278
2279 rtx
2280 expand_builtin_eh_stub ()
2281 {
2282 rtx stub_start = gen_label_rtx ();
2283 rtx after_stub = gen_label_rtx ();
2284 rtx handler, offset, temp;
2285
2286 emit_jump (after_stub);
2287 emit_label (stub_start);
2288
2289 eh_regs (&handler, &offset, 0);
2290
2291 adjust_stack (offset);
2292 emit_indirect_jump (handler);
2293
2294 emit_label (after_stub);
2295 return gen_rtx (LABEL_REF, Pmode, stub_start);
2296 }
2297
2298 /* Set up the registers for passing the handler address and stack offset
2299 to the stub above. */
2300
2301 void
2302 expand_builtin_set_eh_regs (handler, offset)
2303 tree handler, offset;
2304 {
2305 rtx reg1, reg2;
2306
2307 eh_regs (&reg1, &reg2, 1);
2308
2309 store_expr (offset, reg2, 0);
2310 store_expr (handler, reg1, 0);
2311
2312 /* These will be used by the stub. */
2313 emit_insn (gen_rtx (USE, VOIDmode, reg1));
2314 emit_insn (gen_rtx (USE, VOIDmode, reg2));
2315 }
This page took 0.150501 seconds and 6 git commands to generate.